invokeRaw method

@override
FutureOr<JsonScheme> invokeRaw({
  required InvokeParametersLlamaLibraryData<JsonScheme> invokeParametersLlamaLibraryData,
})
override

General library documentation (currently undocumented), by General Corporation, Global Corporation & General Developer.

Implementation

/// Invokes a raw request described by [invokeParametersLlamaLibraryData] and
/// returns the resulting [JsonScheme].
///
/// The request's `parameters` are normalized first: a non-empty `"@extra"`
/// correlation id is ensured (generated via [generateUuid] when absent) and
/// `"@type"` is coerced to a `String`. If `"@type"` is empty an `error`
/// result is returned immediately.
///
/// Outside the worker isolate (`_isInIsolate == false`) the request is
/// forwarded via [send] and the update event carrying the same `"@extra"` is
/// awaited, subject to the options' `invokeTimeOut`. Inside the worker
/// isolate the request is executed directly ([_sendMessage] /
/// [_loadModel]) and the result is both sent back and returned.
@override
FutureOr<JsonScheme> invokeRaw({
  required InvokeParametersLlamaLibraryData<JsonScheme>
      invokeParametersLlamaLibraryData,
}) async {
  // Wait until the isolate communication channel has been set up.
  await _completerSendPortInitialized.future;
  // Per-call options take precedence over the instance-level defaults.
  final invokeParametersLlamaLibraryDataOptions =
      invokeParametersLlamaLibraryData
              .invokeParametersLlamaLibraryDataOptions ??
          this.invokeParametersLlamaLibraryDataOptions;
  if (_isInIsolate == true) {
    // NOTE(review): when the caller supplied no per-call options this mutates
    // the shared instance-level options object, so isThrowOnError stays false
    // for later calls too — confirm that is intended.
    invokeParametersLlamaLibraryDataOptions.isThrowOnError = false;
  }
  final parameters = invokeParametersLlamaLibraryData.parameters;

  // Ensure a non-empty "@extra" correlation id so the asynchronous response
  // can be matched back to this invocation.
  final String extra = () {
    if (parameters["@extra"] is String == false) {
      parameters["@extra"] = "";
    }
    String extraProcessed = (parameters["@extra"] as String).trim();
    if (extraProcessed.isEmpty) {
      extraProcessed = generateUuid(10);
    }
    // Written through both references; presumably they alias the same map,
    // but the getter could return a copy — keep both writes.
    invokeParametersLlamaLibraryData.parameters["@extra"] = extraProcessed;
    parameters["@extra"] = extraProcessed;
    return extraProcessed;
  }();
  // Coerce "@type" (the method name) to a String before trimming it.
  if (parameters["@type"] is String == false) {
    parameters["@type"] = "";
  }
  final String method = (parameters["@type"] as String).trim();
  // Patch applied to every result so callers can correlate it.
  final Map<String, dynamic> patchData = {
    "@extra": extra,
  };
  if (method.isEmpty) {
    // Nothing to invoke — report an error without touching the isolate.
    return InvokeParametersLlamaLibraryData.send(
      data: JsonScheme({
        "@type": "error",
        "message": "method_empty",
      }),
      patchData: patchData,
      invokeParametersLlamaLibraryDataOptions:
          invokeParametersLlamaLibraryDataOptions,
    );
  }

  if (_isInIsolate == false) {
    // Forward the request to the worker isolate. No await occurs between
    // send() and on() below, so the response event cannot be missed.
    send(
      invokeParametersLlamaLibraryData.copyWith(
        parameters: parameters,
        extra: extra,
        isVoid: invokeParametersLlamaLibraryData.isVoid,
        invokeParametersLlamaLibraryDataOptions:
            invokeParametersLlamaLibraryDataOptions,
      ),
    );

    if (invokeParametersLlamaLibraryData.isVoid == true) {
      // Fire-and-forget invocation: acknowledge immediately.
      return InvokeParametersLlamaLibraryData.send(
        data: JsonScheme({
          "@type": "ok",
        }),
        patchData: patchData,
        invokeParametersLlamaLibraryDataOptions:
            invokeParametersLlamaLibraryDataOptions,
      );
    }
    // Await the update event whose "@extra" matches this invocation.
    final Completer<JsonScheme> completerResult = Completer();
    final listener = on(
      eventType: eventUpdate,
      onUpdate: (updateLlamaLibrary) {
        if (completerResult.isCompleted) {
          return;
        }
        final update = updateLlamaLibrary.update;
        if (update["@extra"] == extra) {
          if (update.rawData.containsKey("is_stream")) {
            if (update["is_stream"] == true) {
              // Streaming response: complete on the first streamed chunk.
              completerResult.complete(update);
            } else {
              // Non-streaming marker: wait for the final chunk.
              if (update["is_done"] == true) {
                completerResult.complete(update);
              }
            }
          } else {
            completerResult.complete(update);
          }
        }
      },
    );

    final result = await completerResult.future.timeout(
      invokeParametersLlamaLibraryDataOptions.invokeTimeOut,
      onTimeout: () {
        // Substitute a synthetic error result instead of throwing.
        return JsonScheme({
          "@type": "error",
          "message": "timeout",
        });
      },
    );
    // Best-effort cleanup: the listener may already be closed/removed.
    try {
      listener.close();
    } catch (e) {
      // Ignored intentionally; cleanup only.
    }
    try {
      eventEmitter.off(listener: listener);
    } catch (e) {
      // Ignored intentionally; cleanup only.
    }
    return InvokeParametersLlamaLibraryData.send(
      data: result,
      patchData: patchData,
      invokeParametersLlamaLibraryDataOptions:
          invokeParametersLlamaLibraryDataOptions,
    );
  } else {
    // Inside the worker isolate: execute the request directly.
    final result = await Future(() async {
      if (parameters is SendLlamaLibraryMessage) {
        return await _sendMessage(
          parameters: parameters,
          extra: extra,
          invokeParametersLlamaLibraryDataOptions:
              invokeParametersLlamaLibraryDataOptions,
        );
      }
      if (parameters is LoadModelFromFileLlamaLibrary) {
        final bool isLoadModel = _loadModel(
          modelPath: parameters.model_file_path ?? "",
        );
        if (isLoadModel) {
          return JsonScheme({
            "@type": "ok",
          });
        } else {
          return JsonScheme({
            "@type": "error",
            "message": "cant_load_model_maybe_empty_or_not_exist",
          });
        }
      }
      // Unknown request type.
      return JsonScheme({
        "@type": "error",
        "message": "unimplemented",
      });
    });

    final resultPatch = InvokeParametersLlamaLibraryData.send(
      data: result,
      patchData: patchData,
      invokeParametersLlamaLibraryDataOptions:
          invokeParametersLlamaLibraryDataOptions,
    );

    // Send the result back to the main isolate and return it locally.
    send(resultPatch);
    return resultPatch;
  }
}