@inproceedings{dreano-etal-2023-embed,
title = "{E}mbed{\_}{L}lama: Using {LLM} Embeddings for the Metrics Shared Task",
author = {Dreano, S{\"o}ren and
Molloy, Derek and
Murphy, Noel},
editor = "Koehn, Philipp and
Haddow, Barry and
Kocmi, Tom and
Monz, Christof",
booktitle = "Proceedings of the Eighth Conference on Machine Translation",
month = dec,
year = "2023",
address = "Singapore",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2023.wmt-1.60",
doi = "10.18653/v1/2023.wmt-1.60",
pages = "738--745",
    abstract = "Embed{\_}llama is an assessment metric for language translation that hinges upon the utilization of the recently introduced Llama 2 Large Language Model (LLM), specifically focusing on its embedding layer, with the aim of transforming sentences into a vector space that establishes connections between geometric and semantic proximities.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="dreano-etal-2023-embed">
<titleInfo>
<title>Embed_Llama: Using LLM Embeddings for the Metrics Shared Task</title>
</titleInfo>
<name type="personal">
<namePart type="given">Sören</namePart>
<namePart type="family">Dreano</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Derek</namePart>
<namePart type="family">Molloy</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Noel</namePart>
<namePart type="family">Murphy</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2023-12</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the Eighth Conference on Machine Translation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Philipp</namePart>
<namePart type="family">Koehn</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Barry</namePart>
<namePart type="family">Haddow</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Tom</namePart>
<namePart type="family">Kocmi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Christof</namePart>
<namePart type="family">Monz</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Singapore</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
  <abstract>Embed_llama is an assessment metric for language translation that hinges upon the utilization of the recently introduced Llama 2 Large Language Model (LLM), specifically focusing on its embedding layer, with the aim of transforming sentences into a vector space that establishes connections between geometric and semantic proximities.</abstract>
<identifier type="citekey">dreano-etal-2023-embed</identifier>
<identifier type="doi">10.18653/v1/2023.wmt-1.60</identifier>
<location>
<url>https://aclanthology.org/2023.wmt-1.60</url>
</location>
<part>
<date>2023-12</date>
<extent unit="page">
<start>738</start>
<end>745</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Embed_Llama: Using LLM Embeddings for the Metrics Shared Task
%A Dreano, Sören
%A Molloy, Derek
%A Murphy, Noel
%Y Koehn, Philipp
%Y Haddow, Barry
%Y Kocmi, Tom
%Y Monz, Christof
%S Proceedings of the Eighth Conference on Machine Translation
%D 2023
%8 December
%I Association for Computational Linguistics
%C Singapore
%F dreano-etal-2023-embed
%X Embed_llama is an assessment metric for language translation that hinges upon the utilization of the recently introduced Llama 2 Large Language Model (LLM), specifically focusing on its embedding layer, with the aim of transforming sentences into a vector space that establishes connections between geometric and semantic proximities.
%R 10.18653/v1/2023.wmt-1.60
%U https://aclanthology.org/2023.wmt-1.60
%U https://doi.org/10.18653/v1/2023.wmt-1.60
%P 738-745
Markdown (Informal)
[Embed_Llama: Using LLM Embeddings for the Metrics Shared Task](https://aclanthology.org/2023.wmt-1.60) (Dreano et al., WMT 2023)
ACL