<?xml version="1.0" encoding="UTF-8"?>
<itemContainer xmlns="http://omeka.org/schemas/omeka-xml/v5" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://omeka.org/schemas/omeka-xml/v5 http://omeka.org/schemas/omeka-xml/v5/omeka-xml-5-0.xsd" uri="https://repository.horizon.ac.id/items/browse?collection=779&amp;output=omeka-xml&amp;sort_field=Dublin+Core%2CTitle" accessDate="2026-04-14T20:34:29+00:00">
  <miscellaneousContainer>
    <pagination>
      <pageNumber>1</pageNumber>
      <perPage>10</perPage>
      <totalResults>26</totalResults>
    </pagination>
  </miscellaneousContainer>
  <item itemId="10391" public="1" featured="1">
    <fileContainer>
      <file fileId="10404">
        <src>https://repository.horizon.ac.id/files/original/2f6713f692c13f87c3e5392f995f81d7.pdf</src>
        <authentication>b97576354b49ba673b06b81ce69d67d6</authentication>
      </file>
    </fileContainer>
    <collection collectionId="779">
      <elementSetContainer>
        <elementSet elementSetId="1">
          <name>Dublin Core</name>
          <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
          <elementContainer>
            <element elementId="50">
              <name>Title</name>
              <description>A name given to the resource</description>
              <elementTextContainer>
                <elementText elementTextId="110835">
                  <text>VOL. 23, NO.6 2025</text>
                </elementText>
              </elementTextContainer>
            </element>
          </elementContainer>
        </elementSet>
      </elementSetContainer>
    </collection>
    <elementSetContainer>
      <elementSet elementSetId="1">
        <name>Dublin Core</name>
        <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
        <elementContainer>
          <element elementId="50">
            <name>Title</name>
            <description>A name given to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111036">
                <text>A hybrid ARIMA and DNN approach with residual learning for electric vehicle charging demand forecasting</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="49">
            <name>Subject</name>
            <description>The topic of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111037">
                <text>Autoregressive integrated moving average&#13;
Charging demand forecasting&#13;
Deep neural network&#13;
Electric vehicle&#13;
Hybrid model&#13;
Residual learning</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="41">
            <name>Description</name>
            <description>An account of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111038">
                <text>The rapid growth of electric vehicle (EV) adoption has created significant challenges for power grid management and charging infrastructure planning. Accurate forecasting of EV charging demand is therefore essential to ensure reliable electricity supply and effective station deployment. This study proposes a novel hybrid forecasting framework that combines autoregressive integrated moving average (ARIMA) with deep neural networks (DNN) through a residual learning strategy. In this approach, ARIMA models the linear temporal patterns, while DNN captures the nonlinear residuals, resulting in improved efficiency and predictive accuracy. The proposed hybrid model is one of the first applications of the residual learning approach for EV demand forecasting in Indonesia. Experimental evaluation using real-world daily consumption data shows that the hybrid method achieved the highest prediction accuracy of 98.22%, consistently outperforming single-model baselines. Beyond technical performance, the model can support stakeholders in planning charging infrastructure and help maintain grid stability in rapidly growing EV ecosystems.</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="39">
            <name>Creator</name>
            <description>An entity primarily responsible for making the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111039">
                <text>Wahyu Cesar, Dwidharma Priyasta, Prasetyo Aji, Melyana, Agus Suprianto, Osen Fili Nami, Riza</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="48">
            <name>Source</name>
            <description>A related resource from which the described resource is derived</description>
            <elementTextContainer>
              <elementText elementTextId="111040">
                <text>Journal homepage: http://journal.uad.ac.id/index.php/TELKOMNIKA</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="40">
            <name>Date</name>
            <description>A point or period of time associated with an event in the lifecycle of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111041">
                <text>Oct 19, 2025</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="37">
            <name>Contributor</name>
            <description>An entity responsible for making contributions to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111042">
                <text>PERI IRAWAN</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="42">
            <name>Format</name>
            <description>The file format, physical medium, or dimensions of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111043">
                <text>PDF</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="44">
            <name>Language</name>
            <description>A language of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111044">
                <text>ENGLISH</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="51">
            <name>Type</name>
            <description>The nature or genre of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111045">
                <text>TEXT</text>
              </elementText>
            </elementTextContainer>
          </element>
        </elementContainer>
      </elementSet>
    </elementSetContainer>
    <tagContainer>
      <tag tagId="13298">
        <name>Autoregressive integrated moving average Charging demand forecasting Deep neural network Electric vehicle Hybrid model Residual learning</name>
      </tag>
    </tagContainer>
  </item>
  <item itemId="10387" public="1" featured="1">
    <fileContainer>
      <file fileId="10400">
        <src>https://repository.horizon.ac.id/files/original/3acb868c0a0ae4c0995a098d5e539e7e.pdf</src>
        <authentication>eadeea477fa4ad757981234736c0f33a</authentication>
      </file>
    </fileContainer>
    <collection collectionId="779">
      <elementSetContainer>
        <elementSet elementSetId="1">
          <name>Dublin Core</name>
          <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
          <elementContainer>
            <element elementId="50">
              <name>Title</name>
              <description>A name given to the resource</description>
              <elementTextContainer>
                <elementText elementTextId="110835">
                  <text>VOL. 23, NO.6 2025</text>
                </elementText>
              </elementTextContainer>
            </element>
          </elementContainer>
        </elementSet>
      </elementSetContainer>
    </collection>
    <elementSetContainer>
      <elementSet elementSetId="1">
        <name>Dublin Core</name>
        <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
        <elementContainer>
          <element elementId="50">
            <name>Title</name>
            <description>A name given to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110996">
                <text>Adaptive DICOM images encryption using quadtree and lightweight ITUBee algorithm</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="49">
            <name>Subject</name>
            <description>The topic of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110997">
                <text>DICOM image&#13;
Henon map&#13;
ITUBee&#13;
Lightweight encryption&#13;
Medical image&#13;
Quadtree</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="41">
            <name>Description</name>
            <description>An account of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110998">
                <text>The encryption of medical images protects the privacy of patient information transmitted over networks and communications. In this paper, a lightweight encryption method for medical images is proposed, combining a quadtree-based segmentation and a modified ITUBee algorithm for encryption. A digital imaging and communications in medicine (DICOM) image is divided into variable-size blocks using the quadtree technique, and the key is generated through a two-dimensional Henon map; the first dimension is used in the confusion process (bit permutation) of the pixel values, and the second sequence is used to generate the key schedule through the application round function. Different numbers of rounds are applied to the ITUBee method based on the size of the segments in the quadtree, making the algorithm adaptive by increasing the round number when the block size is reduced. The method is used as a lightweight encryption method for encrypting all blocks, utilizing different round numbers for each block size to balance the degree of complexity with the total time consumption of the DICOM image. The result reinforces the proposed method, which produced a high mean squared error (MSE) between the DICOM image and the encrypted one, and a lower peak signal-to-noise ratio (PSNR). The proposed generated numbers were also tested using the National Institute of Standards and Technology (NIST) suite to evaluate the randomness.</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="39">
            <name>Creator</name>
            <description>An entity primarily responsible for making the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110999">
                <text>Muntaha Abdulzahra Hatem, Balsam Abdulkadhim Hameedi, Jamal Nasir Hasoon, Fahad Ghalib Abdulkadhum</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="48">
            <name>Source</name>
            <description>A related resource from which the described resource is derived</description>
            <elementTextContainer>
              <elementText elementTextId="111000">
                <text>Journal homepage: http://journal.uad.ac.id/index.php/TELKOMNIKA</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="40">
            <name>Date</name>
            <description>A point or period of time associated with an event in the lifecycle of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111001">
                <text>Oct 19, 2025</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="37">
            <name>Contributor</name>
            <description>An entity responsible for making contributions to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111002">
                <text>PERI IRAWAN</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="42">
            <name>Format</name>
            <description>The file format, physical medium, or dimensions of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111003">
                <text>PDF</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="44">
            <name>Language</name>
            <description>A language of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111004">
                <text>ENGLISH</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="51">
            <name>Type</name>
            <description>The nature or genre of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111005">
                <text>TEXT</text>
              </elementText>
            </elementTextContainer>
          </element>
        </elementContainer>
      </elementSet>
    </elementSetContainer>
    <tagContainer>
      <tag tagId="13294">
        <name>DICOM image Henon map ITUBee Lightweight encryption Medical image Quadtree</name>
      </tag>
    </tagContainer>
  </item>
  <item itemId="10372" public="1" featured="1">
    <fileContainer>
      <file fileId="10385">
        <src>https://repository.horizon.ac.id/files/original/6b9c8ffaaa11cf2c68a24da8cdd0e173.pdf</src>
        <authentication>d4c2faed33fa2c565a60cdff9763a30e</authentication>
      </file>
    </fileContainer>
    <collection collectionId="779">
      <elementSetContainer>
        <elementSet elementSetId="1">
          <name>Dublin Core</name>
          <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
          <elementContainer>
            <element elementId="50">
              <name>Title</name>
              <description>A name given to the resource</description>
              <elementTextContainer>
                <elementText elementTextId="110835">
                  <text>VOL. 23, NO.6 2025</text>
                </elementText>
              </elementTextContainer>
            </element>
          </elementContainer>
        </elementSet>
      </elementSetContainer>
    </collection>
    <elementSetContainer>
      <elementSet elementSetId="1">
        <name>Dublin Core</name>
        <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
        <elementContainer>
          <element elementId="50">
            <name>Title</name>
            <description>A name given to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110846">
                <text>Advanced signal transformation techniques to improve spectral efficiency in visible light communication systems</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="49">
            <name>Subject</name>
            <description>The topic of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110847">
                <text>Complex signal mapping&#13;
Hermitian symmetry&#13;
Light emitting diodes&#13;
Quad-light emitting diode complex modulation&#13;
Visible light communication</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="41">
            <name>Description</name>
            <description>An account of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110848">
                <text>Visible light communication (VLC) offers high-speed wireless communication using the visible light spectrum. Achieving high spectral efficiency while maintaining a low bit error rate (BER) remains a challenge. This paper explores the use of quadrature amplitude modulation (QAM) combined with orthogonal frequency division multiplexing (OFDM) to address these challenges. Matrix laboratory (MATLAB) simulations show that QAM-OFDM achieves a BER of 0.001 at comparable signal-to-noise ratios (SNR), outperforming traditional hermitian symmetry (HS), complex signal mapping (CSM), and quad-light emitting diode (LED) complex modulation (QCM) techniques. Unlike CSM, and QCM, which increase complexity, and BER, QAM-OFDM efficiently utilizes available bandwidth, reducing errors, and enhancing spectral efficiency. The study concludes, that QAM-OFDM happens to be the optimal solution for the future VLC systems, offering better performance within both efficiency, and reliability.</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="39">
            <name>Creator</name>
            <description>An entity primarily responsible for making the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110849">
                <text>Shaher Fleyeh Nawaf, Ammar Bouallegue, Sameh Najeh</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="48">
            <name>Source</name>
            <description>A related resource from which the described resource is derived</description>
            <elementTextContainer>
              <elementText elementTextId="110850">
                <text>Journal homepage: http://journal.uad.ac.id/index.php/TELKOMNIKA</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="40">
            <name>Date</name>
            <description>A point or period of time associated with an event in the lifecycle of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110851">
                <text>Oct 19, 2025</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="37">
            <name>Contributor</name>
            <description>An entity responsible for making contributions to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110852">
                <text>PERI IRAWAN</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="42">
            <name>Format</name>
            <description>The file format, physical medium, or dimensions of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110853">
                <text>PDF</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="44">
            <name>Language</name>
            <description>A language of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110854">
                <text>ENGLISH</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="51">
            <name>Type</name>
            <description>The nature or genre of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110855">
                <text>TEXT</text>
              </elementText>
            </elementTextContainer>
          </element>
        </elementContainer>
      </elementSet>
    </elementSetContainer>
    <tagContainer>
      <tag tagId="13283">
        <name>Complex signal mapping Hermitian symmetry Light emitting diodes Quad-light emitting diode complex modulation Visible light communication</name>
      </tag>
    </tagContainer>
  </item>
  <item itemId="10389" public="1" featured="1">
    <fileContainer>
      <file fileId="10402">
        <src>https://repository.horizon.ac.id/files/original/5cb01a20c034fe765ca575f91b71d6f9.pdf</src>
        <authentication>fa69d74c19df8f8146cee959fecb2e06</authentication>
      </file>
    </fileContainer>
    <collection collectionId="779">
      <elementSetContainer>
        <elementSet elementSetId="1">
          <name>Dublin Core</name>
          <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
          <elementContainer>
            <element elementId="50">
              <name>Title</name>
              <description>A name given to the resource</description>
              <elementTextContainer>
                <elementText elementTextId="110835">
                  <text>VOL. 23, NO.6 2025</text>
                </elementText>
              </elementTextContainer>
            </element>
          </elementContainer>
        </elementSet>
      </elementSetContainer>
    </collection>
    <elementSetContainer>
      <elementSet elementSetId="1">
        <name>Dublin Core</name>
        <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
        <elementContainer>
          <element elementId="50">
            <name>Title</name>
            <description>A name given to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111016">
                <text>An insight on using deep learning algorithm in diagnosing gastritis</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="49">
            <name>Subject</name>
            <description>The topic of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111017">
                <text>Deep learning&#13;
Diagnosis system&#13;
Gastritis detection&#13;
GoogleNet&#13;
ResNet&#13;
TResNet&#13;
VGGNet</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="41">
            <name>Description</name>
            <description>An account of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111018">
                <text>Chronic autoimmune gastritis (CAG) is a condition in which the stomach membrane is significantly impacted by inflammation. Despite the availability of numerous modern medical techniques, the detection of this condition continues to be a difficult challenge. White light endoscopy (WLE) has been employed to diagnose gastritis, but it has been subject to certain constraints. This technique is most effective when executed by an endoscopist who possesses a high level of expertise. In the present day, WLE is frequently accompanied by artificial intelligence (AI) due to its superior ability to detect defects that lead to damage. Recently, there has been a substantial increase in the efficacy of AI in conjunction with the expertise of endoscopists in the detection of CAG. The 25,216 intriguing case studies were examined in the eight selected studies. The collection comprised 84,678 frames and 10,937 images. The AI was 94% sensitive (95% CI: 0.88-0.97, I2 = 96.2%) and 96% specific (95% CI: 0.88-0.98, I2 = 98.04%). The receiver operating characteristic curve had an area of 0.98 (95% confidence interval: 0.96–0.99). A camera is highly effective when combined with AI to assist in the identification of CAG and is advantageous for clinical review.</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="39">
            <name>Creator</name>
            <description>An entity primarily responsible for making the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111019">
                <text>Ragu P. J., Ashok Vajravelu, Muhammad Mahadi bin Abdul Jamil, Syed Riyaz Ahammed</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="48">
            <name>Source</name>
            <description>A related resource from which the described resource is derived</description>
            <elementTextContainer>
              <elementText elementTextId="111020">
                <text>Journal homepage: http://journal.uad.ac.id/index.php/TELKOMNIKA</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="40">
            <name>Date</name>
            <description>A point or period of time associated with an event in the lifecycle of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111021">
                <text>Oct 19, 2025</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="37">
            <name>Contributor</name>
            <description>An entity responsible for making contributions to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111022">
                <text>PERI IRAWAN</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="42">
            <name>Format</name>
            <description>The file format, physical medium, or dimensions of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111023">
                <text>PDF</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="44">
            <name>Language</name>
            <description>A language of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111024">
                <text>ENGLISH</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="51">
            <name>Type</name>
            <description>The nature or genre of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111025">
                <text>TEXT</text>
              </elementText>
            </elementTextContainer>
          </element>
        </elementContainer>
      </elementSet>
    </elementSetContainer>
    <tagContainer>
      <tag tagId="13296">
        <name>Deep learning Diagnosis system Gastritis detection GoogleNet ResNet TResNet VGGNet</name>
      </tag>
    </tagContainer>
  </item>
  <item itemId="10375" public="1" featured="1">
    <fileContainer>
      <file fileId="10388">
        <src>https://repository.horizon.ac.id/files/original/7cc16c6bec82882d2314c47ad9e085cc.pdf</src>
        <authentication>f6e99774705e1294a700f4ff3142ca6c</authentication>
      </file>
    </fileContainer>
    <collection collectionId="779">
      <elementSetContainer>
        <elementSet elementSetId="1">
          <name>Dublin Core</name>
          <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
          <elementContainer>
            <element elementId="50">
              <name>Title</name>
              <description>A name given to the resource</description>
              <elementTextContainer>
                <elementText elementTextId="110835">
                  <text>VOL. 23, NO.6 2025</text>
                </elementText>
              </elementTextContainer>
            </element>
          </elementContainer>
        </elementSet>
      </elementSetContainer>
    </collection>
    <elementSetContainer>
      <elementSet elementSetId="1">
        <name>Dublin Core</name>
        <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
        <elementContainer>
          <element elementId="50">
            <name>Title</name>
            <description>A name given to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110876">
                <text>Analyzing temporal properties of speech trajectory using&#13;
graph structures towards speech recognition</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="49">
            <name>Subject</name>
            <description>The topic of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110877">
                <text>Graph eigenvalues&#13;
Graph signal processing&#13;
Speech analysis&#13;
Structural processing&#13;
Speech trajectory</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="41">
            <name>Description</name>
            <description>An account of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110878">
                <text>Speech signal analysis aims to identify patterns within data to develop effective recognition algorithms. This process primarily utilizes feature extraction techniques such as linear predictive coding (LPC), linear predictive cepstral coefficients (LPCCs), and Mel-frequency cepstral coefficients (MFCCs). These features are crucial for constructing recognition algorithms that leverage both statistical and deep learning methods. While deep learning models require extensive datasets, they often prove unsuitable for low-resource languages. The Hidden Markov model (HMM) is the most widely adopted statistical framework in speech processing. However, HMMs are characterized by state-dependent models, where each state interacts only with its neighboring states. This limitation restricts HMMs from capturing long-term signal properties, highlighting the need for addressing these constraints at the feature extraction stage. Most feature extraction methods rely on short-term signal processing, which further limits the comprehension of speech utterances. To overcome these limitations, alternative methods are necessary to capture more comprehensive patterns. This paper presents a graph-based approach for analyzing speech trajectories and their temporal properties, which are subsequently validated using HMMs in speech recognition tasks. Graph-based representations on a low-resource Telugu dataset improve recognition accuracy by 13% while reducing processing time compared to traditional LPC.</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="39">
            <name>Creator</name>
            <description>An entity primarily responsible for making the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110879">
                <text>Parabattina Bhagath1, Malempati Shanmukha2, Gnana Nagasri Puthi2</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="48">
            <name>Source</name>
            <description>A related resource from which the described resource is derived</description>
            <elementTextContainer>
              <elementText elementTextId="110880">
                <text>Journal homepage: http://journal.uad.ac.id/index.php/TELKOMNIKA</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="40">
            <name>Date</name>
            <description>A point or period of time associated with an event in the lifecycle of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110881">
                <text>Sep 10, 2025</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="37">
            <name>Contributor</name>
            <description>An entity responsible for making contributions to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110882">
                <text>PERI IRAWAN</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="42">
            <name>Format</name>
            <description>The file format, physical medium, or dimensions of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110883">
                <text>PDF</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="44">
            <name>Language</name>
            <description>A language of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110884">
                <text>ENGLISH</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="51">
            <name>Type</name>
            <description>The nature or genre of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110885">
                <text>TEXT</text>
              </elementText>
            </elementTextContainer>
          </element>
        </elementContainer>
      </elementSet>
    </elementSetContainer>
    <tagContainer>
      <tag tagId="13286">
        <name>Graph eigenvalues Graph signal processing Speech analysis Structural processing Speech trajectory</name>
      </tag>
    </tagContainer>
  </item>
  <item itemId="10386" public="1" featured="1">
    <fileContainer>
      <file fileId="10399">
        <src>https://repository.horizon.ac.id/files/original/622ce51f897d53147c45c66687b31b16.pdf</src>
        <authentication>bfb39f5950f5369b36a1d30debaf00ea</authentication>
      </file>
    </fileContainer>
    <collection collectionId="779">
      <elementSetContainer>
        <elementSet elementSetId="1">
          <name>Dublin Core</name>
          <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
          <elementContainer>
            <element elementId="50">
              <name>Title</name>
              <description>A name given to the resource</description>
              <elementTextContainer>
                <elementText elementTextId="110835">
                  <text>VOL. 23, NO.6 2025</text>
                </elementText>
              </elementTextContainer>
            </element>
          </elementContainer>
        </elementSet>
      </elementSetContainer>
    </collection>
    <elementSetContainer>
      <elementSet elementSetId="1">
        <name>Dublin Core</name>
        <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
        <elementContainer>
          <element elementId="50">
            <name>Title</name>
            <description>A name given to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110986">
                <text>Application of artificial intelligence in emission prediction for hybrid electric vehicles: integrating ANN and GPR</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="49">
            <name>Subject</name>
            <description>The topic of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110987">
                <text>Artificial neural networks&#13;
Emission prediction&#13;
Gaussian process regression&#13;
Hybrid electric vehicles&#13;
Prediction uncertainty&#13;
Vehicle emissions</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="41">
            <name>Description</name>
            <description>An account of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110988">
                <text>In recent years, hybrid electric vehicles (HEVs) have emerged as a promising solution to mitigate vehicular emissions and improve fuel efficiency. This study focuses on the Toyota Prius HEV, employing advanced artificial neural networks (ANN) and Gaussian process regression (GPR) to develop a predictive model for vehicle emissions. The model considers multiple pollutants, including carbon monoxide (CO), carbon dioxide (CO₂), hydrocarbons (HC), and nitrogen oxides (NOx), measured under diverse driving conditions. The ANN model predicts emission trends, while GPR estimates prediction uncertainty, enhancing the model’s robustness. The GPR models achieved uncertainty levels of ±0.829 ppm for CO, ±9.978 ppm for HC, ±0.144 ppm for NOx, and ±411.256 ppm for CO₂, respectively, underscoring the robustness of the integrated approach for emission prediction. This research aims to support the development of more sustainable vehicle technologies and inform policy making for environmental sustainability (e.g., Euro 6/Euro 7 standards). Overall, the study addresses how artificial intelligence (AI) can be utilized to achieve accurate multi-pollutant emission predictions in HEVs. The findings reveal that an integrated ANN-GPR approach yields superior predictive performance (R² values approaching 1.0) with quantifiable uncertainty, outperforming a stand-alone ANN model and providing a robust solution to the emission prediction challenge.</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="39">
            <name>Creator</name>
            <description>An entity primarily responsible for making the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110989">
                <text>Heru Priyanto1, Rizqon Fajar1, Yaaro Telaumbanua1, Ariyanto1, Mohammad Mukhlas Af1, Sigit Tri Atmaja1, Muhammad Samsul Maarif1,2, Kurnia Fajar Adhi Sukra1,2, Fauzi Dwi Setiawan1</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="48">
            <name>Source</name>
            <description>A related resource from which the described resource is derived</description>
            <elementTextContainer>
              <elementText elementTextId="110990">
                <text>Journal homepage: http://journal.uad.ac.id/index.php/TELKOMNIKA</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="40">
            <name>Date</name>
            <description>A point or period of time associated with an event in the lifecycle of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110991">
                <text>Oct 19, 2025</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="37">
            <name>Contributor</name>
            <description>An entity responsible for making contributions to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110992">
                <text>PERI IRAWAN</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="42">
            <name>Format</name>
            <description>The file format, physical medium, or dimensions of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110993">
                <text>PDF</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="44">
            <name>Language</name>
            <description>A language of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110994">
                <text>ENGLISH</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="51">
            <name>Type</name>
            <description>The nature or genre of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110995">
                <text>TEXT</text>
              </elementText>
            </elementTextContainer>
          </element>
        </elementContainer>
      </elementSet>
    </elementSetContainer>
    <tagContainer>
      <tag tagId="13293">
        <name>Artificial neural networks Emission prediction Gaussian process regression Hybrid electric vehicles Prediction uncertainty Vehicle emissions</name>
      </tag>
    </tagContainer>
  </item>
  <item itemId="10378" public="1" featured="1">
    <fileContainer>
      <file fileId="10391">
        <src>https://repository.horizon.ac.id/files/original/344a7bfd943557759640109c03b27d5e.pdf</src>
        <authentication>3fe737eee4dd334c1c50c5fa70d24cf3</authentication>
      </file>
    </fileContainer>
    <collection collectionId="779">
      <elementSetContainer>
        <elementSet elementSetId="1">
          <name>Dublin Core</name>
          <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
          <elementContainer>
            <element elementId="50">
              <name>Title</name>
              <description>A name given to the resource</description>
              <elementTextContainer>
                <elementText elementTextId="110835">
                  <text>VOL. 23, NO.6 2025</text>
                </elementText>
              </elementTextContainer>
            </element>
          </elementContainer>
        </elementSet>
      </elementSetContainer>
    </collection>
    <elementSetContainer>
      <elementSet elementSetId="1">
        <name>Dublin Core</name>
        <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
        <elementContainer>
          <element elementId="50">
            <name>Title</name>
            <description>A name given to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110906">
                <text>Automatic diagnosis of rice plant diseases using VGG-16 and computer vision</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="49">
            <name>Subject</name>
            <description>The topic of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110907">
                <text>Automatic diagnosis&#13;
Computer vision&#13;
Optimization algorithm&#13;
Rice plant disease&#13;
Visual geometry group-16</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="41">
            <name>Description</name>
            <description>An account of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110908">
                <text>Pathogens are organisms that cause disease in plants. In the case of rice, these pathogens can include fungi, bacteria, nematodes, protozoa, and viruses. This study aims to investigate rice plant diseases using a hybrid system that employs the visual geometry group-16 (VGG-16) architecture and computer vision techniques, alongside various optimization algorithms and hyperparameters. We utilize the convolutional neural network (CNN) architecture of VGG-16 for feature extraction, implementing a process known as transfer learning. Additionally, this research compares different optimization algorithms with the VGG-16 model to identify the most effective optimization for the CNN architecture applied to the tested dataset. The main contribution of this study is the development of a model for identifying rice plant diseases based on data collected using VGG-16 for feature extraction and neural networks for classification with specific parameters. Our findings indicate that the best optimization algorithm is stochastic gradient descent (SGD) with momentum, achieving training and validation loss results of 0.173 and 0.168, respectively. Furthermore, the training and validation accuracies were 0.95 and 0.957. The model’s performance metrics include an accuracy of 95.75, precision of 95.75, recall of 95.75, and an F1-score of 95.73.</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="39">
            <name>Creator</name>
            <description>An entity primarily responsible for making the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110909">
                <text>Al-Bahra1, Henderi2, Nur Azizah2, Muhammad Hudzaifah Nasrullah3, Didik Setiyadi4</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="48">
            <name>Source</name>
            <description>A related resource from which the described resource is derived</description>
            <elementTextContainer>
              <elementText elementTextId="110910">
                <text>Journal homepage: http://journal.uad.ac.id/index.php/TELKOMNIKA</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="40">
            <name>Date</name>
            <description>A point or period of time associated with an event in the lifecycle of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110911">
                <text>Oct 19, 2025</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="37">
            <name>Contributor</name>
            <description>An entity responsible for making contributions to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110912">
                <text>PERI IRAWAN</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="42">
            <name>Format</name>
            <description>The file format, physical medium, or dimensions of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110913">
                <text>PDF</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="44">
            <name>Language</name>
            <description>A language of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110914">
                <text>ENGLISH</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="51">
            <name>Type</name>
            <description>The nature or genre of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110915">
                <text>TEXT</text>
              </elementText>
            </elementTextContainer>
          </element>
        </elementContainer>
      </elementSet>
    </elementSetContainer>
    <tagContainer>
      <tag tagId="13289">
        <name>Automatic diagnosis Computer vision Optimization algorithm Rice plant disease Visual geometry group-16</name>
      </tag>
    </tagContainer>
  </item>
  <item itemId="10388" public="1" featured="1">
    <fileContainer>
      <file fileId="10401">
        <src>https://repository.horizon.ac.id/files/original/4bc334676184a81d32c1d45c3daf6e91.pdf</src>
        <authentication>8c26d53b27a24b8564f62c879c2ec6d8</authentication>
      </file>
    </fileContainer>
    <collection collectionId="779">
      <elementSetContainer>
        <elementSet elementSetId="1">
          <name>Dublin Core</name>
          <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
          <elementContainer>
            <element elementId="50">
              <name>Title</name>
              <description>A name given to the resource</description>
              <elementTextContainer>
                <elementText elementTextId="110835">
                  <text>VOL. 23, NO.6 2025</text>
                </elementText>
              </elementTextContainer>
            </element>
          </elementContainer>
        </elementSet>
      </elementSetContainer>
    </collection>
    <elementSetContainer>
      <elementSet elementSetId="1">
        <name>Dublin Core</name>
        <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
        <elementContainer>
          <element elementId="50">
            <name>Title</name>
            <description>A name given to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111006">
                <text>Business intelligence through data visualization: a case study using marketing campaign dataset</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="49">
            <name>Subject</name>
            <description>The topic of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111007">
                <text>Business intelligence&#13;
Data science&#13;
Data visualization&#13;
Machine learning&#13;
Marketing analytics</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="41">
            <name>Description</name>
            <description>An account of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111008">
                <text>In today’s competitive business environment, data-driven marketing strategies are essential for successful campaign outcomes. This study presents a comprehensive analysis of marketing campaign data, emphasizing its role in enhancing customer engagement, improving decision-making, and increasing conversion rates. It explores the complexity of campaign dynamics and consumer behavior, demonstrating how business intelligence and data visualization techniques support informed marketing decisions and actionable insights. Advanced data science methods such as data cleaning, feature engineering, and cross-validation enhance predictive accuracy and campaign optimization. Visualization plays a central role in transforming raw data into interpretable insights, enabling businesses to identify trends in customer preferences and purchasing behavior. Key findings reveal that customers aged 51–70, particularly those with higher education and income levels, show the greatest purchasing power, especially for wine and meat products. These insights help align marketing strategies with data-driven understanding to design personalized campaigns that resonate with target audiences. By combining analytical methods with effective visualization, businesses can develop impactful campaigns that drive engagement, boost conversions, and foster revenue growth. The study concludes with directions for future research, including real-time data processing and automated decision-making systems to ensure continuous improvement in digital marketing strategies.</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="39">
            <name>Creator</name>
            <description>An entity primarily responsible for making the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111009">
                <text>Aditi Bansal, Ankit Gupta</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="48">
            <name>Source</name>
            <description>A related resource from which the described resource is derived</description>
            <elementTextContainer>
              <elementText elementTextId="111010">
                <text>Journal homepage: http://journal.uad.ac.id/index.php/TELKOMNIKA</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="40">
            <name>Date</name>
            <description>A point or period of time associated with an event in the lifecycle of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111011">
                <text>Oct 19, 2025</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="37">
            <name>Contributor</name>
            <description>An entity responsible for making contributions to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111012">
                <text>PERI IRAWAN</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="42">
            <name>Format</name>
            <description>The file format, physical medium, or dimensions of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111013">
                <text>PDF</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="44">
            <name>Language</name>
            <description>A language of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111014">
                <text>ENGLISH</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="51">
            <name>Type</name>
            <description>The nature or genre of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111015">
                <text>TEXT</text>
              </elementText>
            </elementTextContainer>
          </element>
        </elementContainer>
      </elementSet>
    </elementSetContainer>
    <tagContainer>
      <tag tagId="13295">
        <name>Business intelligence Data science Data visualization Machine learning Marketing analytics</name>
      </tag>
    </tagContainer>
  </item>
  <item itemId="10382" public="1" featured="1">
    <fileContainer>
      <file fileId="10395">
        <src>https://repository.horizon.ac.id/files/original/dfe66c6650b3d39439fa254268ea0d8c.pdf</src>
        <authentication>2764ab4bfa5acd8c97644a2bbbed3f81</authentication>
      </file>
    </fileContainer>
    <collection collectionId="779">
      <elementSetContainer>
        <elementSet elementSetId="1">
          <name>Dublin Core</name>
          <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
          <elementContainer>
            <element elementId="50">
              <name>Title</name>
              <description>A name given to the resource</description>
              <elementTextContainer>
                <elementText elementTextId="110835">
                  <text>VOL. 23, NO.6 2025</text>
                </elementText>
              </elementTextContainer>
            </element>
          </elementContainer>
        </elementSet>
      </elementSetContainer>
    </collection>
    <elementSetContainer>
      <elementSet elementSetId="1">
        <name>Dublin Core</name>
        <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
        <elementContainer>
          <element elementId="50">
            <name>Title</name>
            <description>A name given to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110946">
                <text>Comparative performance analysis of convolutional neural network-architectures on coffee-bean roast classification</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="49">
            <name>Subject</name>
            <description>The topic of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110947">
                <text>Agtron level&#13;
Classification&#13;
Coffee-bean roast&#13;
Convolutional neural networks&#13;
Performance analysis</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="41">
            <name>Description</name>
            <description>An account of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110948">
                <text>The classification of coffee bean roast levels using Agtron standards has evolved from traditional subjective methods to technology-driven approaches employing advanced artificial intelligence. Recent advancements in computer vision have demonstrated the capability of convolutional neural networks (CNNs) in providing objective and consistent roast level classification compared to human visual assessment, which is prone to variability and subjectivity. This research presents a performance analysis of five CNN architectures (AlexNet, ResNet, MobileNet, VGGNet, and DenseNet) for classifying coffee beans into eight distinct Agtron roast levels. The comprehensive methodology encompasses four phases: i) data acquisition, ii) image preprocessing, iii) model training and validation, and iv) evaluation metric. During training-validation, DenseNet outperformed other models, achieving 99.702% training accuracy and 77.68% validation accuracy. In the testing evaluation, DenseNet also led with an average testing accuracy of 93.8%, followed by ResNet at 92.6%, VGGNet and AlexNet both at 92.4%, and MobileNet at 89.7%. The results show that the DenseNet shows promise in classifying Agtron coffee-bean roast classification.</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="39">
            <name>Creator</name>
            <description>An entity primarily responsible for making the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110949">
                <text>Irfan Asfy Fakhry Anto1, Jony Winaryo Wibowo2, Aris Munandar2, Taufik Ibnu Salim2</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="48">
            <name>Source</name>
            <description>A related resource from which the described resource is derived</description>
            <elementTextContainer>
              <elementText elementTextId="110950">
                <text>Journal homepage: http://journal.uad.ac.id/index.php/TELKOMNIKA</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="40">
            <name>Date</name>
            <description>A point or period of time associated with an event in the lifecycle of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110951">
                <text>Oct 19, 2025</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="37">
            <name>Contributor</name>
            <description>An entity responsible for making contributions to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110952">
                <text>PERI IRAWAN</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="42">
            <name>Format</name>
            <description>The file format, physical medium, or dimensions of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110953">
                <text>PDF</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="44">
            <name>Language</name>
            <description>A language of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110954">
                <text>ENGLISH</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="51">
            <name>Type</name>
            <description>The nature or genre of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="110955">
                <text>TEXT</text>
              </elementText>
            </elementTextContainer>
          </element>
        </elementContainer>
      </elementSet>
    </elementSetContainer>
    <tagContainer>
      <tag tagId="13291">
        <name>Agtron level Classification Coffee-bean roast Convolutional neural networks Performance analysis</name>
      </tag>
    </tagContainer>
  </item>
  <item itemId="10392" public="1" featured="1">
    <fileContainer>
      <file fileId="10405">
        <src>https://repository.horizon.ac.id/files/original/e34d5bb716dc0b4651909bcaf070676a.pdf</src>
        <authentication>67485892dd25563e6e63a71cea0e4fba</authentication>
      </file>
    </fileContainer>
    <collection collectionId="779">
      <elementSetContainer>
        <elementSet elementSetId="1">
          <name>Dublin Core</name>
          <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
          <elementContainer>
            <element elementId="50">
              <name>Title</name>
              <description>A name given to the resource</description>
              <elementTextContainer>
                <elementText elementTextId="110835">
                  <text>VOL. 23, NO.6 2025</text>
                </elementText>
              </elementTextContainer>
            </element>
          </elementContainer>
        </elementSet>
      </elementSetContainer>
    </collection>
    <elementSetContainer>
      <elementSet elementSetId="1">
        <name>Dublin Core</name>
        <description>The Dublin Core metadata element set is common to all Omeka records, including items, files, and collections. For more information see, http://dublincore.org/documents/dces/.</description>
        <elementContainer>
          <element elementId="50">
            <name>Title</name>
            <description>A name given to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111046">
                <text>Design and analysis of a new scheme of the FOSTA for DFIG based wind turbine</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="49">
            <name>Subject</name>
            <description>The topic of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111047">
                <text>Active and reactive power&#13;
Doubly fed induction generator&#13;
Fractional calculus theory&#13;
Super twisting algorithm&#13;
Wind turbine</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="41">
            <name>Description</name>
            <description>An account of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111048">
                <text>A super-twisting algorithm (STA)-based controller was designed and implemented in this study to achieve precise control over the stator active and reactive power of a doubly fed induction generator (DFIG)-equipped wind turbine device. The fractional calculus theory (FCT) allowed the STA to maximize its effectiveness and performance. A distinct form is sent to the FCT-based STA controller. The stator flux orientation technique uses control that is independent of stator active and reactive powers. In order to achieve a quick system with sufficient precision and a robust control strategy, the hybrid method control is based on the fractional-order super twisting algorithm (FOSTA) and FCT. To demonstrate the performance, efficacy, and resilience of the stated nonlinear approach, a number of simulations are provided.</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="39">
            <name>Creator</name>
            <description>An entity primarily responsible for making the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111049">
                <text>Kheira Belgacem, Houaria Abdelli, Mebarka Atig, Abdelkader Mezouar</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="48">
            <name>Source</name>
            <description>A related resource from which the described resource is derived</description>
            <elementTextContainer>
              <elementText elementTextId="111050">
                <text>Journal homepage: http://journal.uad.ac.id/index.php/TELKOMNIKA</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="40">
            <name>Date</name>
            <description>A point or period of time associated with an event in the lifecycle of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111051">
                <text>Oct 19, 2025</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="37">
            <name>Contributor</name>
            <description>An entity responsible for making contributions to the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111052">
                <text>PERI IRAWAN</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="42">
            <name>Format</name>
            <description>The file format, physical medium, or dimensions of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111053">
                <text>PDF</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="44">
            <name>Language</name>
            <description>A language of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111054">
                <text>ENGLISH</text>
              </elementText>
            </elementTextContainer>
          </element>
          <element elementId="51">
            <name>Type</name>
            <description>The nature or genre of the resource</description>
            <elementTextContainer>
              <elementText elementTextId="111055">
                <text>TEXT</text>
              </elementText>
            </elementTextContainer>
          </element>
        </elementContainer>
      </elementSet>
    </elementSetContainer>
  </item>
</itemContainer>
