Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file added src/__init__.py
Empty file.
Empty file.
12 changes: 12 additions & 0 deletions src/datacustomcode/llm_gateway/LLMGateway.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
class LLMGateway(BaseLLMGateway):
    """Concrete gateway that returns a canned text-generation result.

    Used as a stand-in for a live LLM backend so callers of
    ``generate_text`` can be exercised end-to-end.
    """

    def generate_text(
        self,
        request: GenerateTextRequest,
    ) -> GenerateTextResponse:
        """Return a stubbed :class:`GenerateTextResponse`.

        The *request* is currently ignored; a fixed placeholder generation
        is always returned.
        """
        response_data = {
            "generation": {"generatedText": "I am dreaming!!"},
        }

        # Keyword arguments are required here: GenerateTextResponse's first
        # dataclass field is `version` (str), so the positional call
        # GenerateTextResponse(200, {...}) would bind 200 to `version` and
        # the payload dict to `status_code`.
        return GenerateTextResponse(status_code=200, data={"data": response_data})
Empty file.
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,12 @@
from datacustomcode.proxy.base import BaseProxyAccessLayer


class BaseProxyClient(BaseProxyAccessLayer):
class BaseLLMGateway:
def __init__(self):
pass

@abstractmethod
def generate_text(self, request: GenerateTextRequest) -> GenerateTextResponse:
    """Generate text for *request*; must be implemented by subclasses."""
    ...

@abstractmethod
def llm_gateway_generate_text(
Expand Down
22 changes: 22 additions & 0 deletions src/datacustomcode/llm_gateway/types/GenerateTextRequest.py

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

16 changes: 16 additions & 0 deletions src/datacustomcode/llm_gateway/types/GenerateTextResponse.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@

from dataclasses import dataclass
from typing import Optional

import betterproto
import grpclib

from .google import protobuf



@dataclass
class GenerateTextResponse(betterproto.Message):
    """Protobuf-backed response message for an LLM text-generation call."""

    # The integers (1-3) are the protobuf wire-format field tags.
    version: str = betterproto.string_field(1)
    # HTTP-style status code of the gateway call.
    status_code: int = betterproto.uint32_field(2)
    # Free-form JSON-like payload (google.protobuf.Struct); presumably
    # carries the generated text — confirm against the gateway implementation.
    data: protobuf.Struct = betterproto.message_field(3)
Empty file.
14 changes: 0 additions & 14 deletions src/datacustomcode/proxy/__init__.py

This file was deleted.

24 changes: 0 additions & 24 deletions src/datacustomcode/proxy/base.py

This file was deleted.

34 changes: 0 additions & 34 deletions src/datacustomcode/proxy/client/LocalProxyClientProvider.py

This file was deleted.

14 changes: 0 additions & 14 deletions src/datacustomcode/proxy/client/__init__.py

This file was deleted.

18 changes: 17 additions & 1 deletion src/datacustomcode/templates/function/payload/entrypoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,10 @@
from typing import List
from uuid import uuid4

import datacustomcode.ll_gateway.types.GenerateTextRequest
import datacustomcode.file
import datacustomcode.function_runtime.FunctionRuntime

logger = logging.getLogger(__name__)


Expand Down Expand Up @@ -33,14 +37,26 @@ def chunk_text(text: str, chunk_size: int = 1000) -> List[str]:
return chunks


def function(request: dict) -> dict:
def function(request: dict, runTime: FunctionRuntime) -> dict:
logger.info("Inside Function")
logger.info(request)

items = request["input"]
output_chunks = []
current_seq_no = 1 # Start sequence number from 1


request = GenerateTextRequest.with_locale(modelName= "", prompt="How are you doing?", locale="en_EN")
response = client.llm_gateway.genearte_text(request)
if response.is_success:
print(response.text)
else:
print(response.error_code)

file_path = runTime.file.find_path("data.csv")
content = open(file_path, 'r').read()
logger.info(content)

for item in items:
# Item is DocElement as dict
logger.info(f"Processing item: {item}")
Expand Down
Loading