From 975993e22eecbb4187414ae9fa686e15a650c5f6 Mon Sep 17 00:00:00 2001 From: William Kennedy Date: Tue, 4 Jun 2024 14:50:28 -0400 Subject: [PATCH] updated apis for complete input --- README.md | 26 +- docs/assets/navigation.js | 2 +- docs/assets/search.js | 2 +- docs/classes/Client.html | 86 +++--- docs/enums/Languages.html | 4 +- docs/enums/PIIs.html | 4 + docs/enums/ReplaceMethods.html | 4 +- docs/interfaces/Base64Encoder.html | 4 +- docs/interfaces/Chat.html | 14 +- docs/interfaces/ChatChoice.html | 8 +- docs/interfaces/ChatInput.html | 18 +- docs/interfaces/ChatInputMessage.html | 6 + docs/interfaces/ChatInputOptions.html | 14 + docs/interfaces/ChatMessage.html | 8 +- docs/interfaces/ChatSSE.html | 14 +- docs/interfaces/ChatSSEChoice.html | 12 +- docs/interfaces/ChatSSEDelta.html | 4 +- docs/interfaces/ChatSSEInput.html | 14 + docs/interfaces/ChatVision.html | 14 +- docs/interfaces/ChatVisionChoice.html | 8 +- docs/interfaces/ChatVisionInput.html | 14 + docs/interfaces/ChatVisionMessage.html | 8 +- docs/interfaces/Completion.html | 12 +- docs/interfaces/CompletionChoice.html | 10 +- docs/interfaces/CompletionInput.html | 12 + docs/interfaces/Embedding.html | 14 +- docs/interfaces/EmbeddingData.html | 10 +- docs/interfaces/EmbeddingInput.html | 6 +- docs/interfaces/Factuality.html | 12 +- docs/interfaces/FactualityCheck.html | 8 +- docs/interfaces/Injection.html | 12 +- docs/interfaces/InjectionCheck.html | 8 +- docs/interfaces/ReplacePI.html | 12 +- docs/interfaces/ReplacePICheck.html | 8 +- docs/interfaces/Toxicity.html | 12 +- docs/interfaces/ToxicityCheck.html | 8 +- docs/interfaces/Translate.html | 18 +- docs/interfaces/Translation.html | 10 +- docs/modules.html | 6 + examples/chat.js | 27 +- examples/chat_sse.js | 44 ++-- examples/chat_vision.js | 18 +- examples/completion.js | 13 +- makefile | 14 +- package.json | 2 +- src/api_client.ts | 347 +++++++++++++++++-------- src/api_model.ts | 114 +++++++- test/api_test.js | 82 ++++-- 48 files changed, 756 
insertions(+), 371 deletions(-) create mode 100644 docs/enums/PIIs.html create mode 100644 docs/interfaces/ChatInputMessage.html create mode 100644 docs/interfaces/ChatInputOptions.html create mode 100644 docs/interfaces/ChatSSEInput.html create mode 100644 docs/interfaces/ChatVisionInput.html create mode 100644 docs/interfaces/CompletionInput.html diff --git a/README.md b/README.md index 613fcef..46189d3 100644 --- a/README.md +++ b/README.md @@ -30,20 +30,32 @@ import * as pg from 'predictionguard'; const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY); async function Chat() { - const input = [ - { - role: pg.Roles.User, - content: 'How do you feel about the world in general', + const input = { + model: pg.Models.NeuralChat7B, + messages: [ + { + role: pg.Roles.User, + content: 'How do you feel about the world in general', + }, + ], + maxTokens: 1000, + temperature: 0.1, + topP: 0.1, + options: { + factuality: true, + toxicity: true, + pii: pg.PIIs.Replace, + piiReplaceMethod: pg.ReplaceMethods.Random, }, - ]; + }; - var [result, err] = await client.Chat(pg.Models.NeuralChat7B, input, 1000, 1.1); + var [result, err] = await client.Chat(input); if (err != null) { console.log('ERROR:' + err.error); return; } - console.log('RESULT:' + result.model + ': ' + result.choices[0].message.content); + console.log('RESULT:' + result.createdDate() + ': ' + result.model + ': ' + result.choices[0].message.content); } Chat(); diff --git a/docs/assets/navigation.js b/docs/assets/navigation.js index a59dca3..5080aad 100644 --- a/docs/assets/navigation.js +++ b/docs/assets/navigation.js @@ -1 +1 @@ -window.navigationData = 
"data:application/octet-stream;base64,H4sIAAAAAAAAE42VXUvDMBSG/0uvh8OhQ3bp3KDgRJx4I17E9LjGpUlpTnEi/ndrO7PG9iS5zfO+D8lJP56/EoQDJovklqldzXZgkklSMsybJVB1YaYWnOVYyIbuhcqSxdX3xHY3OgM5KHardOsBSsk4bABznQ3aLvVYtBzuuV2kO0spQOGpxCUzBsy0W3d757N+My2aSayFhGHZomD/DvBDV3tCcaQ+yzUzML9YKd5MuDpphEKo3pqhmamTcFWzy3l/FDnDUcMvCBWXuRYcyHqHQ5JUlTW9hZaGFBswppkcKTnykGa7XZGKhkXUAwOxiQjVDUhkPlMbCImehBFakZoOx0kCZ+uH4oShW3NSXqUuSglIHtTiOInvoP9CPuGqeIUsE2o3arI0SnHDiGfBSUSp6NfNjXhlVaXHvzst8VXXjGPNpMDP0f4Jx0mWOfB9wNRmfLpUvQMnHx9LoxT0htyIT3b89d2nox5LoxT0ftyIT/aoD4JTN/YHYwT0ZpyEV1UxZSTD8ffT0hgFdeE9PtS8/AClLQRsMgkAAA==" \ No newline at end of file +window.navigationData = "data:application/octet-stream;base64,H4sIAAAAAAAAE42WQU/DMAyF/0vPExMIENqRbUiVGEwMcUEcQmvWsDSpmkyAEP+drhtZQ2vHV7/3Pjm2q+35O3Hw6ZJJciv0eivWYJNRUglXNCXQ29KOvXBSuFI16kbqPJlc/Yx8dmFyUL3gvoqnlmnay+xqeOIBKiUyWIArTN7LhipBMar/yraIZ6ZKgnbHUKaEtWDH+3qYOz3rJtOymd2NVNAPeymavwP3YeoNgjioFOVaWLg8n+us2Ul9xEjtoH5rhmbHgSNEnV1cdkdRCDdI2Amx4LQwMgM0vpdjkFRXW7yFVmUhFmBtMz6adDCxgPeVk0ZbGngwxYCx5ph9rVZzFNFojHhkZd7BQM1AOUGRWgMDRF/AnyEGepK22QSK2cs8SGRIXRMPSL+w4+HhYtcUuEikKSsFDp2bl3kQam7/TDwgMbfQQ+Hm5SvkudTrQZBXWYiZQE4+cLBQ+NtCCwmrazP8A9AqVPRGZG4rlHRfg/mjzINMC8g2EVLroXCpfocMvUavshB4Q6GFgh3+gyzTQY5XWQi8n9BCwR7Np8ywjf2JHADeTOAgUbXQVgk3/Ll7lYPAFt7R+5iXX4guxDntCgAA" \ No newline at end of file diff --git a/docs/assets/search.js b/docs/assets/search.js index 4a82639..f5919f0 100644 --- a/docs/assets/search.js +++ b/docs/assets/search.js @@ -1 +1 @@ -window.searchData = 
"data:application/octet-stream;base64,H4sIAAAAAAAAE61dXXPjtpL9L/ar70QAv+dtJx83szeTSiX3ZmtramqKtjA2x5KoJalxJqn89wVIgmy0ugGK9oOrZKmB0+jTaACHFPXXVVM/tVev3/919Vgdtlevhcxvrg7lXl29vvp2V6lDd3VzdWp2+t+7Xdm2qv1mePvVQ7ff6c/Gd/XnV3/f2F4SIade7upD2zWnu65ufF1du3ag25urY9kYRyaPZiCxkfGEZPr2IQyfzz137fYfVfuPY1N9KTu1HKc8Vv9SX71Qk8kqNLmJAQsPpZeD69FgYcTO+v7tt++D3Q82axF+r9qqPgRBJrNVOPX+uFNdEAearcH5fn+rttvqcO+FgVZrUH4o77pTuas6f5Y5ZmtwflTlrnv49kHdPXqBXLs1SG8Pn9VdkB5otQblV3XclXfql7deFGi1BuXf9R/VXYgbYLQKoykP7W4oFB4QYLUqXuXTd/U/lb++ACOimtWdZkxtL8T8pW4XgI5WL4aqy9gy4NlwLXaSziWjacDSVx061XzS6dd+03/gXUedZUeF+7m2NnQyDJ7M3c/heVdv1a6dOleH0779ZnjT7+A8zDdNtb1XXf2kmp/K5l697fbvdvu33R3f67WvDT2E0VHSgx9Vs1et/KWpf9qV+zLK33igSePLMX+uT+3QVd+NFJEPlLZ+zkjfVXrTVO6yZUOF1pejap+/lCLJ3vz4gwfNtVoRUXXSLpoNgXdQyOxynP+toviNs7k6B4E2lyN8p9SxVerxW23TpNmbt+MG1wPoabIQH5S8eqfwlO7fWzqj/6ttdb6UB+zw3Mk1NKEdHLwg+/9Pqxq+6/HTi3v97WvbqT3f7/T5sp7PNhfvVPdQb88C63y4NMK/lodtfebreVfXkyHjtOsaifVD+agWII1m63G+1duR+7r5ugALmK7He1e2jwuwRrMLcWb2fyoP96fy/mxGTe8vnlWfmuqxPBz8HV1DM9rp2SMaZ/9QNhVefTHKZLQOoylvgxDWZiXCXh2q8hDCmKzWofypmtuy+hzEgXarkN6U7f+d8DxEKJPNOgS1K5tTG4yZY7cS6XCvT58hGGu0DqNuw/TPRqswdBkqdyGM2Wgdhj47P6gQxmS0FqOpH1Ugu4DVSpTqoNogiDVah9HUZRekHVitQ/lThzsAMZqs6v+78lC1AYDJZh3CqQuNwJqs6v/7w/0uOITZaB1G29XhKQ6sVqH8UH0OYkw2KxF21bE61CGMyWolyiGcVLPROgxtFkqryWYVwj/1inAX5ANYrUNRdXMfRpmtVqI0+zDGaLMOodEnwgDAaLKu/9PnstGVNAAxW61C+bGswiV9NlqHoW4b9RSAsDbrEPQbgUBZk3X9n/R2qQnHCZitwnl7p7d/2s/A5h2arcPR51a9EwgOyLFbh9QE66I1Wde/3v+FhzEZrcL47/JYhjdXwGoVyr/0sbLcln6Q2Wglxp/lY4COyWYdQt2oEB2TzSqEn8ruS5Dx2WgdRtU9nMrw/sexW4X0rrxT2wU7LcduJdKu/CpCKKPNegS5AEE+C0H/YXGOAhnN1uJ0wQk/G63E0Ov2Q2DVmo1WYfysjkEpYLJZh1A3Tyq8nYNmq3B+UU14uZqN1mHU4WPVZLMSoelO96dgajl2q5B+rfcL6hewWodyasOszEarMH4r93UIYrJZh6Ca2+AoZqN1GLv6Sxk4Lkw2qxEWiLLQbCVO+UUvRIEtKrBah3JcINPMRuswnsqHKlQeZ6OVGGobHsdktArj3+W+2vkRrMm6/tVOF6MAgLVZh/BQBmgYLdb1fmoegxzMRqsw/vPYlFV49kGzdTjNNsDEaLGq9/9Ru1CcrMmq/n+vVGdehtY/x245Ery/503ZqjT+/nBnrtlPaOD+HMfAe5nQvdWwbzA0XtbtNWpCj8f1lx6Tuefh7eF46ijg6cPl9y419U6Fu7oezWi/Z58YkLtad3xY4PL1bL
kcCofnnWpbnRUc2vjx8hAFvIf9LfLf+reCEQcryEkAqD51njxyoCbTS8AwL98+1NUdO7Th0+Ws6HfUHws6u7aGvOujYwzQ3p9OEGo2XQvWdmV3apdgTZYXQGFGOJwLWNh6+7juP+c9ZFPz1txu7O96srm8+7tGlcONop7+Z6PLAfbm5i9/99Zkhfc9rWySjN5PRosAnPVtHPl38CZnX4i+897njIBQCv722/ffqV1XckD28xcr106Hi+r15KJ3FP7qNhm8UIFz+1tQ42YPGbhtgAUIZ22fAXevDqoxyfOxU3/4yIK4Z42e4cCuvj829S07jRA0MH8G6KfKHFY/6mnTgi9c+JFxm8vgiUz14L5E4bfdhGq/8WRV+Z8AFqwAHpDAIjChLFkHPDDepWACCa8GvpH4F4R5JAvWBBfm0mUBBy24MiA4lKzDF88C+3nH6GUOPuddhjfbrq/r1iYCd8kCtQjav9snkBfs+Tlgmkf/CgltXmiRPOtywTrpuLruRHAOu+RcsATYfzo4x11wRmBgaQb9yC+xeICeQuvH6NKqJQTCLFhF/FCBhQRiLVlL/GDe5QRChVeUwKj8i4ozqgXryhnYpUsLEcbg6nIOCtN6+saxpzAhm5coTFSXocKEXb04OUjQQIosBPUUJRI1VJQWwrLHBhLUf17gIemE8eM+uw66PXnr4OzS5XUQwYTqYBDKVwcRVrAOhsE85QmDhcoTBXZReaJH5y9PJCj8crJ9UgGr9rsWy7OOmzlEf/55g1zkknzP7JUoPGt8ISAZOB1/UtBwDF6gop/3FyjnroeX1nICzl/IF8HxVZzAC5TwRYDq7HkdfkwVfHIHB0smhxf1uRXc7chXwGd/Lq7fCCRQvoNAnuqNkELFOwgVTu2L0ppVNEPzfxQzS4+WSYFcsjTQofOuDCQkSOH52TLu42EAKjJ5gRpH9RiocthPrvDc1Q0ZOhLSWj8Pkq11NKa/2vGgNGt+3OeWHtSTr/YAly4uPhgmUH3CUJ7yg7FC9WcBmCEqkAB27zhaXgB1SX1gxuYtEDQoyLXpYU1sgXAtXqA+EB0GygNykgE011zK24qbNxSs2+Q54HydoHADZYKFJJnzgj63Rrgd+UrE7M/FFQKBBApEEMhTHxBSqDyEodjqgJH8xYECuqQ20OPylgYSEuTX9IA1tjK4Fi9QGYgOA5UBOckAHtTTRz3V90cy+yhUp8VzoPm6QMEG6gILSfLmBX1uXXA78tWF2Z+L6wICCdSFIJCnLiCkUF0IQ7F1ASP56wIFdEldoMflrQskJMgv+0xEtiw4Bi9QFc77CxQF18NLzxIEnP8ksQyOrQMUnr8McIAUSz7E59YApx9fCZicubgCuBCBAhCC8cx/Fyc0/YNA7OxHOP7JT8BcMvfJMXmnPgUIc2p8UCmz5wQfL88sfhqi3kKTEPjGadmXuX/t2q+GZRWsM0C/hrUAylNkzqIZKDEkGJUMXvIuuQhKz0unI2+Nmfy5vMi4IKEqEwLylRkXKVhnQlC3qu0+LszrEZNo8kLgH4OZzrjwcVni+x0JFhIIvqyW8NeTZ9+9s02dFZLwjFNrqz2ZWv5yT40T/HTAW3OB7WfVPdXN+ZPF4YfeWb70ZwTOOlz0YwKOjwxdBieMNljNKPyj/pdg3rrf+OJhb8+/53UxcvgbZzz+om+bcSPGyfJDBW6UdDDNJy+XJlNvy3Okd41TDsvuIQQ02ixmyYfnS44Z8fLMQJgXpsWMvDwnBsQPN+Pp7fVfV1/MQxX0svL6Sr6KXhW6xadK7ba69fvBFd1fvd+bPj6Mn/2uDH3GYjD5ZnN1835zk4hX8sOHm/fWvn+7f6M3Evo/cRPLV5nMHDPhmEn9n7yJ01ebwu1NOmaR/i86h4wcIx2M9zHVV+yYJfq/hDJLHLNU/5dSZqljlun/Msosc8xy/V9OhSN3zDQh7wuqt8INrom1IDgQiISeBUF1KFwehIm3IJkQLhXCBF1EpKXLhzBxFy
QjwqVEmNCLhBiPy4kwsRcpYeeSIkzwRUbYuawIE32Rkx66xIiCt3S5kT03xTm2dLmRhgBJcCjRDOmniCDsXF6kCb6UhJ3LijShl8Rkki4n0oReV8VzO5cTaUIvCe6ky4k0oZcEd9LlRJrAS4I76TIiTdj1Cndu5/IRmbBLgo/I5SMyYY8IPiKXj8iEPSL4iFDJ6msWwUfk8hGZsEdUcXP5iEzYI4KPyOUjMmGPCD4il4/IhD0i+IhcPiIT9ojgI3L5iEzYI4KPyOUjNmGPCD5il4/YhD0m+IhdPmIT9pjgI3b5iE3YY4KPGK0i/TJC8BG7fMQm7DHBR+zyEZuwxwQfsctHbMIeE3zELh+xCXtM8BG7fMQm7DHBR+zykZiwxwQfictHYsKeEHwkLh+JCXtC8JG4fCQm7AnBR+LykZiwJwQfCVrW+3Wd4CNx+UhM2BOCj8TlIzFhTwg+EpePxIQ9IfhIXD4SE/aE4CNx+UhN2BOCj9TlIzVhTwk+UpeP1IQ9JfhIXT5SE/aU4CN1+TB7zvcpwUfq8pGasKcEHynaZ/UbLYKP1OUjNWFPCT5Sl4/UhD0l+EhdPlIT9pTgI3X5yEzYU4KPzOUjM2HPCD4yl4/MhD0j+MhcPjIT9ozgI3P5yEzYM4KPzOUjM2HPCD4yl4/MhD0j+MjQxrff+RJ8ZC4fmQl7RvCRuXxkJuwZwUfm8pGbsGcEH7nLR27CnhN85C4fuQl7TvCRu3zkJuw5wUfu8pGbsOcEH7nLR27CnhN85C4fuQl7TvCRu3zkJuw5wUeOTiL9UYTgI3f5yE3Yc4KP3OWjMGHPCT4Kl4/ChL0g+ChcPgoT9oLgo3D5KEzYC4KPwuWjMGEvCD4Kl4/ChL0g+ChcPgoT9oLgo3D5KEzYC4KPwuWjMGEvCD4KdDTsz4YEHwU+G5q4FwQhwyfQsj8ebshzJDoebvrz4YagZfgImvYHxA3BzPARNO1PiBuCnOEjaNofETcEP8NH0LQ/JW6o4+QGnRM3/UFxQ50oN+iouOnPihvqULlBZ8VNf1jcEFwNHwHT8SRPsXV2lh8O8xtSVMHH+eE8Lyi+8Hl+ONCbsz/VL2JsONOb4//mlRQxMkacDcd6QXGGD/bDyZ7rF7E2HO85jxFvwxlfkIqPwKf84ZhPKgzomC+Gc77IbiLtRIwlE8SdHLij9QOJpZiBu4LuGbHXn+5pjQUd/EV/wNeu3URC9yuQMeKuP+Vr12hjxF5/1Neu0caIvf68r527iZJXepYiY8Ref+jXzpF5gRQB0Z/8tXO0G4i//vgvKEFCIGVARMKTnEgeENGgpFF1IsJKWuRJISQTiGhgjyoqSCkQ0cAdVVSQWCB6UUBQsodAeoHodQFBKR8CSQailwboGYdUAxEVnqRE0oGIN56kRPqB6HUCLimRiCBi6UlKpCSIXjHgkjLGImjsSUqkKYheOxCUaiSQrCDi1FMEkbYg4syTwUhgELGvYiKVQfRqgqD0K4GEBpH4KiZSG0Tiq5hIchCJr2Ii3UH0+gKp7AskPYjEVzETrGH7KiYSIUTiq5hIiRCJr2IiOUIkvoqJNAmR+ComEiZEL0AISoIUSJsQqfBQjQQKkfrmHlIpRBp5qEZSheglCV3zSLKRXiF6XYK8lCNSfBki9eQF0i1EmnnyAokXIs09eYEUDJEWHqqRjCGyjYdqpGWIXrMQlDoskJwhetmCizHSNEQ2XDwir8wg7rKBO2q9Q8qGyBJPtiF5Q2SpJ9syfB0p82QbEjpExl7kE0jrEL2mwZgi3vKNJ9eQ5iFy4ck1JHyIXHpyDakfIo88YUMSiMgH7qgNCFJBRJ540hJJIaKXPAR1UUAgNUTkmScnkCQieulD7wlIQpAuIvLCkxNIHBG9CEJe0RVIHxGF8BCNRBJRSA/RSCkRReQhGsklohiu2kqyqCDNRBQ+9pBwIoqBPWqrgLQTUfjYQwKKKAb2qHUJaSii8HGHhBS5GfYr5N0JSEqRGw95Eo
kpcuMhTyI5RW485EkkqMiNhzyJJBW58ZAnkagiB1GFuo4lkagiNx7yJJJV5CCrUNe9JJJV5MZDnkTCihyEFfrWEqSsSOEjDykrUvjIQ9qKFD7ykLYihY88pK1I4SMPqStyUFeoi4YSaStS+MhD2ooUnropkbYihY8+fBPFoK6Qd5lIfCOF9NF3djeFjz58S4X00Yfvq5A++vDNFdJHH77DYpBXYvIsJPFtFoO8wnCC77UY5BWuZ0TgKK+QWwCJ5BU5yCsM20hgkYPAQqpYEukrctRXaLaRwiIHhYVhGyksclRYaLaRxiIHjYW6bi6RxiIHjYW6dC6RxiIHjYW6ei6RxiJ7IUVQF9Al0lhk5DnlSaSxyEFjoS64S6SwyJi9yU8ifUXG/I1+SF2Rg7qS0NmA1BU5qCu0KC6RuiIHdYW69i+RuiIHdYXuFXE2aCvUnQISKStyUFa4oSHWBmWFG9rAWn9r5xfVdGr7drjF8/37q3L+Wfu/rj6Ot35G072mf11pyl//9fff862e5j/TfWl/qh60K+Z2mn+m3bF6VF+dZgCNazT+aP3cSG/sZyzBNtuPv3gEGgrQUHIN27Zqu9I8gXVuKcHwInZ44CfpAagEoNwg7V3CczND/9RODg1vrgz7vh6U/ekT0NEG+G6uFrHt+1+6B44DcvQUYZqBn64HTYHvespwTcefowftEtAuZdvN3/2A8QLkmiroaex8cQZ0ERewC95v+osv0JcN7IgdSN2eZWgKAsCOoam296qrn1Sjg3+vqm6/3+2rzpkkErrATZK7sit3yIMMeMAlm26n7uvGncqAO3bId/1vUAA0ADZkuLkcxbe9Gx/cCWItUpjeXMkyjavhuXqwrYBtuYpg2k4P9IWtY9iaSxfTum2dliBSyTjqiA310JwYuARMmWtG3vbjA/xhcxg3tqaZ5l/GRwHPjUHTdBxA4qNt6IEYQwRqjLkQEOqCoiGSsA+eBh1D5aY6WOs82MO3e2GZgIDFmLfm2Dy8EPaF5Iqm7rOpH5VbbUHtSXg2quHXhkEugXmesPPcPhAT8g+zV+YjjXahMZcAhhcpG5ldpdxVErjCjgA8UnVuCDI5s7AskVMPVD7BQaW8E+DrRqA1aGuX2+kFW9Hs49phYYBZbdPB3J4wznVhg81V+OkrpZAvmHXS9mUdNBeDxsBZ4nI5vijs5mFjXwj7QtoXEZurgyvb/ptK0J0EulNYd+xgE/tOtrHu2FJXJNYd+0LYF9K+YLeed01ddmjRhCt/wtbBP/X8d1qBeLLFazv+1iRoBrhNuKANTxiEeQn3YDnbTKljq9Rjv4dLs9tqTFNnXQdzRXKLBlHtc1jtWQdOHQoSmE8JNyPBgzrnlgDPJmQ2veDYnXo6DyD0P+P8n9qfL/URXO4yrlAO+2diNy42cEpPVcGWenb50HvcHc4guPxzZUU1DSpOMAA2kCxq29V4a5mAACRc4fkEHlo4NwXJa6d1wdXWuYe74dEycOGEQcy5oX8qH51SA2s6uyn/VH3G4wUTJeEmyqdqVx2rQ+00BJFOuAij3+iBiQaXEfYcotufVRYQZnbp+tToFHWapSCoKZfX5qB1hwKUAkdTrmziH2CCJQXWNJaYe1U39xgZlN6UK733qtmjZqD0plwJuG90AXVagfRhNxX3p8/mR9edw2gK5mnKJetDWeHlKIW7Y47/B1XuuofzKQJnCJcDD+q2UU8OJMj1lMt1venU22d5bOrdrtyXUX7rrCvw+Mxlw9zFvtJLU7nL3D4AQezmV29ht26cwXxjN5sPp8N92eBIgwzMuGjpLaI+4m5dCSkDcWYXgwptwCA30m50IlsRE/tOandiud36FHa92NgX0woi7Qv2ADc+IB1WGHjwy7j507f71H+VHyoTCaSImw5924N9ZgRs7jDMhnx4sBdcPqHL0sYlmg4b9p3Ubmczu1vM7YGgyGwQ7QvBTS6NWutzEsqWDGR4xkZ7fh4ndB8u/tYNfvRjF8QCCA
8GBVdTqgatDBmolzzfna7xaMQAjt0zfS6PJT5UZqBmZlySGO243DrbtAxkV8YN77H8s3x0xwcqZsaR+ljrxdYdHjw2clVvN/1kNNxSgJas8rIzogNCBKWK3cTqvc/DqcTbrwzUqpxLHV2Yv5QiyW4fPjl1FUrBHB3z7xXCUgFrFqvK7cs7tT3bMOagbc7VyH25K78KpxmYLDk3zfpm0mkGcjzncrxvpv/2TkuQ5ezRSrfsUJLnIKo5F9W92RY8OOtVDpKc3cTuy9aZ+vD4wW4KSZER7vvldNpn+TjTouGibO6bHzqIrTiQTIKLfSeb1q9JJeBSvUdzMw5K6FyaH9QRCf85CE/OheegTnrLYcRAtO2AaxpHCHwWLKzIKfSXKwWHunlSeBebgyKS8y1P7bBr6jddUkSu6yDrWeHbPsoMkgpXJKuaiNguo4l9J7Xs5jZzCmnXL/tC2BfSvohYT8ZfCIT5CXWg6WDKXjkYHsADdxSQAFYQO5rn0aDwQ4WBS9BjjU/fOTzMcgl6rJvudH9CBaMABbHgJqDzMHKYaFCZLLjwNHqnWjvVDSrkrG7TlE/b+l65zEBiuPLWNzzWrdsSSiXcfOpbtq06a+xsTrnGw9Np96p7qLfuygwmBJuGY/Nj5c4JOCVsbnMMT10QmzRYTjbs+Gu0s3bwrcAqxFRwfR05IYBSHVt5dSU722IUoCgU3MrbnFo8kwrgecHFvC33NWoGMrPgMvP8Oi28CC/y3BJlX0guZ1rV3GLHwZwquBRv9Z5K722c418BkpSdi+2u/lI6qVGAzGB376bZ2Z0HBZhSBTvC45naXIBcKLhcsE8BhckItRk5rfX2RWrTM5vWh0nYK+z6YF8I+4K9SNg+lQ+Vu6AX0AH2qNQ+qS0asfmeMWjJldn2a9spt1CCvGIP0125r9zt0Qam44Ztp3Z6PXAbwvm+4abNmWoGL5OL1GoG7Hm0eyjdKreB27kNN+266cHREBqeBex046ixPRD1EV6BYNeHbn6yLGwLL9XatOI4Zm/TgAXSXgmT7MUQ98GdcIF0Nl9s81PzeJajcBwbrvKcHpuywpXAfOEctOXCd2q2KN3gorzhSkj/+EuwlIABjmFi73s6te59Q/DoyFL0pVKdMXI3SuY77cBZrm496bMDiivcPLC78a9VFN/iO0ng7Qz0GvLh5upYHdWuOmij9x/+/vv/Ac3bODJytQAA"; \ No newline at end of file +window.searchData = 
"data:application/octet-stream;base64,H4sIAAAAAAAAE61dXZPbtpL9LzOvcx0B/M7bOh833hvfdSW52dpypVycET1DjyQqJOWJk8p/X4AgyEarG6A48+AqedTAAXAajcYBRP511TZP3dXX7/+6eqwP26uvhcxvrg7lvrr6+uqbXV0d+qubq1O7U/+925VdV3VfmT+/euj3O/Xd+Ff1/dXfN7aWRMiplrvm0PXt6a5vWl9V164dqPbm6li2uiFTi2YgsZHxhKTr9iGY7+ea+277j7r7x7GtP5d9tRynPNb/qr54oSaTVWhyEwMWHkovB9ejwcIRO6v755+/C1ZvbNYi/Fp3dXMIgkxmq3Ca/XFX9UEcaLYG57v9bbXd1od7Lwy0WoPyfXnXn8pd3fu9zDFbg/NDVe76h28eqrtHL5BrtwbpzeFTdRekB1qtQfmpOu7Ku+rdGy8KtFqD8kvzR30X4gYYrcJoy0O3M4HCAwKsVo1X+fRt88/KH1+AERHNml4xVm0vxHzXdAtAR6sXQ1VhbBnwbLgWO0nnkNG2YOmrD33VflTu1301fOFdR51lpwrXc21taGcwLZmrn4fnbbOtdt1UeXU47buvzB/9DZy7+bqtt/dV3zxV7Y9le1+96fdvd/s3/R1f67WvDN2FsaFkC36o2n3VyXdt8+Ou3JdR/toDTRpfjvnv5tSZqoZqpIh8oLT1c3r6tlZJU7nLlnUVWl+Oqtr8uRRJ9vqH7z1ortWKEa1Oqok6IfB2CpldjvN/dRS/dpKrcxBocznCt1V17Krq8Rtl06bZ6zdjgusB9BRZiA9CXrOr8JQe/rZ0Rv9X1yl/KQ+4wXMl19CEbqBpBVn/f7qq5asev7241p+/dH215+udvl9W8zye7968wcOp/7Q4Pu4akGWh8tf2W7pRAzRZ65jLsPXO3y+q+SyVelv1D832zI2cL5eOwE/lYducMXNe1fVkyFDkNo3E+r58xINCIY1m63G+UcnXfdN+WYAFTNfjvS077EQU1mh2Ic7M/o/l4f5U3p/Fj+nvi2PIx7Z+LA8Hf0XX0Ixu9NwiGmf/ULY1zjUwymS0DqMtb4MQ1mYlwr461OUhhDFZrUP5s2pvy/pTEAfarUJ6XXa/n/A8RCiTzTqEale2py44Zo7dSqTDvdprh2Cs0TqMpgvTPxutwlBhqNyFMGajdRgP1d1DFcKYjNZitM1jFfAuYLUSpT5UXRDEGq3DaJuyD9IOrNah/KmGOwAxmqyq/9vyUHcBgMlmHcKpD/XAmqyq/7vD/S7YhdloHUbXN+EpDqxWoXxffwpiTDYrEXb1sT40IYzJaiXKIexUs9E6DGUWcqvJZhXCP9WKcBfkA1itQ6ma9j6MMlutRGn3YYzRZh1Cq/a/AYDRZF39p09lqyJpAGK2WoXyQ1mHQ/pstA6jum2rpwCEtVmHoP4QGChrsq7+k0qX2vA4AbNVOG/uVPqn2hlI3qHZOhy1b1WZQLBDjt06pDYYF63JuvpV/hfuxmS0CuO/y2MZTq6A1SqUf6ltZbkt/SCz0UqMP8vHAB2TzTqEpq1CdEw2qxB+LPvPQcZno3UYdf9wKsP5j2O3CulteVdtF2Rajt1KpF35RYRQRpv1CHIBgnwWgvqHxTkKZDRbi9MHJ/xstBJDrdsPgVVrNlqF8e/qGJQCJpt1CE37VIXTOWi2Cudd1YaXq9loHUYT3lZNNisR2v50fwq6lmO3CumnZr8gfgGrdSinLszKbLQK4+dy34QgJpt1CFV7G+zFbLQOY9d8LgPbhclmNcICURaarcQpP6uFKJCiAqt1KMcFMs1stA7jqXyoQ+FxNlqJUW3D/ZiMVmH8Uu7rnR/Bmqyrv9qpYBQAsDbrEB7KAA2jxbraT+1jkIPZaBXGfx7bsg7PPmi2DqfdBpgYLVbV/r/VLjRO1mRV/b/WVa8/htY/x245ErzN9LrsqjT+7nCnbyhMaOA2kmPgPSZ0L1YOBUzhZdVeoy
J0f9z20n3SNzzeHI6n/m3VdarvFD62WX5vq212y2u8Hq3pzpw1lIG8axQMuLcRRJ0LXAxMDuT/HPUtzs7bgNFm+UB+PL8VG6r3+uOCK7K40Qx8j699BsH74B3QhdC3+nbIu7bZH/vzi7TBZjCln9ekY10vb4Exfjagc3HhInRc8uKmkF7ubcJyv97rO1zhuq6tXaDxLIyZuv5JOSLNpmvByj9+aR6rQAgY0YDtSri+2h8rtaU/tf5QO85Lx3otZHN8twTLmK0EaRbEUYMzWy6Hwk4dWP4uXvkCy9DFK1Bg1fMttJetsQGg5tR7Zr8DNZleAoZ5+eahqe/Yrplvl7Oi/lL9saCya2vIN31smD/gLIGaTdeCdb2az+xEgViT5QVQmBEO5wIW2AVsHHz/MsW65q1e4P1VTzaXV3/XVqX57YOn/tnocgDvWrh0GWRbP9DKOsnY+sloEYCziRl7/i383Y5viL71/nQHASEX/Pnn77xJiP3+hfIQp7oFHEzNW5mNILwlCUkQMpSTIMxFaUkIdEFm4sIuTU6CwJ78BCGGUpQQVHMIZA4uHjS/CDRNkmieBB8+9F+Ol0K+mkqFkV/NDfVOw2+rXV962jF8/2JZk1PhorRpaqK3F/4kYzJ4oTzDrW9BqjG3kIHbBliAcNb2GXD31UHP0mr7oa/+8JEFcc8KPaMBu+b+2Da3vmAGoYH5M0A/1vpg4INavTpegUDIuMxl8ISnenBfIv+y1YRSMN2SVVnYBLAgEfOABHKxCWVJOuaBCWUFFyQEK/OyuScLUjMX5tLsDA9aMEFDcMhZzSMNvGkaMHkZSRlXGN7twlYygL+fqs4nOp6BggLPAq73nrTiDNVaPwsymCaewS7LFBdAL0gWz8CX5otL4D0p4zluKGukAekpEsgfHaOXnCaXyUJuW9elbwTukhxuEbRflyKQF6hTHDDNoz+JhDYvlEeeVbkglXSauk67OoddomAtAfbrWOe4C9QsBpZm0I/8EvkVqCmUYo1NWpVlQZgFiZYfKpBrQawl6ZYfzJtxQahw0hXolT/vcnq1IPU6A7s0+yKGMZiAnYNCt54e98TnYK7JS6hlRI0hqlA7uSPQ4Qx3MeZk/ixQfzpE9jWcDi2DDqVDFPiidGghPJcOkbjedIgFpL3Vs4wim5dYRqkqQ8sobur6SeIsogtnyeollEQNLaELYVkdiAT1C0A8JO0wftxnr9puTd5Ve27S5as2ggmt2kEo36qNsIKrdhjMs5hisNBiSoFdtJjSvfMvpiQofI6Zfaghu5a6Fsu9jps5RH3+eYOaeKmmQOH5JQUWkBw4Nf6kQu0YvEBEP68vEM7dFl4aywk4fyBfBMdHcQIvEMIXAVZnj/b0Y1bBh3xysKRzeFGfG8HdinwBfG7PxfEbgQTCdxDIE70RUih4B6HCrn2RW7NHVKH5P55OlZ7DKQrkkqWBHjrvykBCAheeH0PrPkkWoCKTF4hxVI2BKIfbyQWeu4beb5CQ1vp5kGysozH90Y4HpVnz4z439KCafLEHNOni4INhAtEnDOUJPxgrFH8WgGmiAg5gc8fR8gKoS+ID0zdvgKBBga9N187ZAOFavEB8ICoMhAfUSF6DuS1va27eULBukeeA83GCwg2ECRaSZM4L+twY4VbkCxFzey6OEAgkECCCQJ74gJBC4SEMxUYHjOQPDhTQJbGB7pc3NJCQwL+mZ7GzkcG1eIHIQFQYiAyokQzgoXr6wCu0FKpT4jnQfFygYANxgYUkefOCPjcuuBX54sLcnovjAgIJxIUgkCcuIKRQXAhDsXEBI/njAgV0SVyg++WNCyQk8C/7+gQ2LDgGLxAVzusLBAW3hZfuJQg4/05iGRwbByg8fxjgACmWfIjPjQFOPb4QMDXm4gjgQgQCQAjGM/9dnND0DwKxsx/h+Cc/AXPJ3Cf75J36FCD0qfGdJkzOCb5e7ln8NES1hSYhaBunZV/W/GvXfjUsq2CdAfo1rAVQniBzNp
qBEEOCUc7gJe+SQ1B6XjoVeWPM1J7Lg4wLEooyISBfmHGRgnEmBHVbdf2HhX49YhJFXgj8Q9DTmSZ8WOb4/oYEAwkEXxZL+PPkue3e2VadBZLwjKvWRnvStfzhnuoneMvgG33A9u+qf2ra85eQwS+9s3zpGwfPKlz03kGnjQxdGieMZqxmFP6tgEswb93HpfCwt+cPSbkYOfy4Fh5/0aNauB5jZ/m+Btd6HUz9zcu5yVTbch8ZmsYph2X/EAIabRaz5MPzOceMeLlnIMwL3WJGXu4TBvG3m3H39vVfV5/1EwnVsvL1lXwVvSpUiY91tduq0u9NU1R9zX6v6/ht/O7XStOnLYzJV5urm/ebm0S+SrLkt99u3tsSwxfDHwYzof4nbuL4lRpcx0w4ZlL9T97E2as4zRwz6ZhF6n8RBRo5ZmpA3sdUbbFjlqj/JZRZ4pil6n8pZZY6Zpn6X0aZZY5Zrv6XUwOSO2aKlPcFZVa4w6tHW5A8CETEwISgGihcLoQec0GyIVw6hB52EZH0uowIPfKC5ES4pAg9+CIhe+TyIvT4i5S0dKkRmgKRkZYuO0KzIHKynS5BouAtXY7kwFFBoUuXI6mJkCSbEs2XYcII0tLlSGoipCQtXY6kJkKS00u6HElNhIqVlKXLkdRESJJN6XIkNRGSZFO6HElNhCTZlC5HUhOh1j/K0uUo0kRIkqPI5SjSREQkR5HLUaSJiEiOIhTWhrhGchS5HEWaiIgOgS5HkSYiIjmKXI4iTUREchS5HEWaiIjkKHI5ijQREclR5HIUaSIikqPI5SjWREQkR7HLUayJiEmOYpejWBMRkxzFLkexJiImOYrR6jMsPyRHsctRrImISY5il6NYExGTHMUuR7EmIiY5il2OYk1ETHIUuxzFmoiY5Ch2OUo0ETHJUeJylGgiEpKjxOUo0UQkJEeJy1GiiUhIjhKXo0QTkZAcJShJGLIEkqPE5SjRRCQkR4nLUaKJSEiOEpejRBORkBwlLkeJJiIhOUpcjlJNREJylLocpZqIlOQodTlKNREpyVHqcpRqIlKSo9TlSOe171OSo9TlKNVEpCRHKcrlhmSO5Ch1OUo1ESnJUepylGoiUpKj1OUo1USkJEepy1GmiUhJjjKXo0wTkZEcZS5HmSYiIznKXI4yTURGcpS5HGWaiIzkKHM5yjQRGclR5nKUaSIykqMMpdxDzk1ylLkcZZqIjOQocznKNBEZyVHmcpRrIjKSo9zlKNdE5CRHuctRronISY5yl6NcE5GTHOUuR7kmIic5yl2Ock1ETnKUuxzlmoic5Ch3Oco1ETnJUY52RsPWiOQodznKNRE5yVHuclRoInKSo8LlqNBEFCRHhctRoYkoSI4Kl6NCE1GQHBUuR4UmoiA5KlyOCk1EQXJUuBwVmoiC5KhwOSo0EQXJUeFyVGgiCpKjAm1ghx0syVGB97CaiYIkyXwHbYdt7IbZ8aKN7GbYyW5IqsyX0HjYzG5ItsyX0HjYz25IwsyX0HjY0m5IzsyX0HjY1W7oDfAG7Ws3w8Z2Q++BN2hruxn2tht6G7xBu9vNsL3dkPyZL4HxqELQDJ7pEEaIoBnEUoTRIgTNIFYjjBwhaAaxIGEUCUa8wJqEESUEzSCWJYwuIZKbWLxK8gIZIwaNNCFS2hgxaNQJRsjA+oRgJSSBBAphFAp6KJBEIaShj3YMiZUkQx/tGEioEIMcIWj5QyCtQgyKhKAVEIHkCjGIEgrvJopeqbFGxoi+QZdQeLT+hegbpAmFR9KHdAsxqBMKjzZG9A0ChcKjjRGBg0YhaE1EIAFDDDKFoGURgTQMEUmPf0ZYDow8no+UDBEZBgtynJGYIQbJQtCyi0B6hhhUC9WTm0j5s8A1IwYH4UL1hG4GYnDQLlRP6JoRg4N8QUueSNkQsZmAMemgSNwQg4ShOk0bI/4GFUN1mjZG/MWRZ57EWNEd+IsykmwkdIjY8JfTNSP+YsMfHTWQ3CHizNdmxF+ce+
Y2Ej1EXHjmNtI9RLLxzG0kfYhEeOY2Uj/EoHEIWs0SSAARg8whaEFLIA1EJEaUp9fLBMvyiScQICVEDHqHoDUwgcQQkWSe6Yr0EDGoHoLWzASSRMQgfAhaNhNIFRGD9iFo5UwgYUQM8oegxTOBtBExKCDkQZNA4ohII08gQPqISGNPIEASiUgTTyBI8clK6plUSCgRaeYJBEgrEWnuCQRILhGp4Y9eqZBiIgZdhFt8kGgiMsMfHWKQbiIGdURNYpJBJJ2ILPJEDaSeiCz2RA0koIgs8UQNpKGIQSkRtGoqMnw8lvmGDjGY5Z5AgMQUkRWeRR7pKWJQTQSt3wokqYhceKIGUlVELj2LPBJWRB55FnmkrYhBQSFPhwUSV0SeeOY20ldEnnrmNpJYRJ555jZSWUSee+Y2ElrEIKdwcxtpLaLYeOY2klvEIKoIWoIXSHERhfS0GYkuYpBWBC3ZC6S7iCL2TFckvYgi8UxXpL6IIvVMVyTAiEFmEfThgUAajChyj+sjGUYUhW/o8En1hnd9iaQYaaSYhHR+iaQYOagt5PUIiYQYufGsgRIJMXLjWQMlEmLkxrMGSiTESCPE0K4vkRAjjRBDu75EQow0Qgx9qiORECONEMONMuLPCDH0SiWRECONEEMfGUkkxEgjxNA+J5EQI4VnHyGRECONEMP4HBJi5KC1kNdnJJJh5KC0MKaIPSPCMB6HRBhpRBjG45AII80tEcbj8D0RI8Mww4avihgdhj7Gk2e3RaTHPfGFEaPD0Md+Et8ZMToM4xf42ojRYVLyPpjEN0eMDsP4Bb48MkgtpNgl8e0Ro8IwZOMLJEaFYchGKow0KgxDNlJhpFFhUkmGF6TCyMjHH1JhpFFh6INTiVQYGfn4QyqMNCoMfdAqkQojIx9/SIWRRoUhb6dJJMLIyEcgEmFk5CMQyTAy9hGIZBgZ+whEMoyMfQQiGUYaGYY+pZZIhpGxj0Akw0gjw9Cn2hLJMDL2EYhkGDnKMCSBSIWRsY9ApMLI2EcgUmFk4iMQqTAy8RGIVBiZ+AhEKow0Kgx9JUAiFUYmPgKRCiMTXwRFKoxMfAQiFUaOKgxdMSIw8RGIRBiZ+AhEIoxMfQQiEUamPgKRCCNTH4FIhpFGhknJ250SyTDSyDAMJ0iGkUaG4WrGlyc9MoxEMowcZRiabSTDSCPDkOK0RCqMNCoMwzZSYaRRYRi2kQojjQrDsI1UGGlUGPpOjEQqjDQqDH0tRiIVRhoVhr4ZI5EKI40KQ1+OkUiFkUaFYZwOqTDSqDD0ZRqJVBhpVBjS5ZAGIweZhY7MSIGRRoHJaL9ACow0CgwTMpACI40CQ9/qkUiBkUaBoetF3OWGO3pFQwqMNAoM1z3EnVFguO4Z7oYfVXyu2r7avjE/rnj/fnqz0l9XH8ZfXGhBfqhV//hCy/Bf//X33/NvLPT/dO3lx7Z+LA/6B2hzWVhUJQpMyf1D2erX2oNyEpRjEY/1Y/UFFovmUmyhtrxFWKCUSjm4Yuq/wzvGQcEYFEy4gl1Xd32pH/E/l5QFaOiGK/ln1d6W9ScEmgDQlClqf9gDSMxAY/VkUKY3V9ptfDVU9lXfoCIBqNH3Etjyv58cN4pT0PCMK1btyvbU4YHOQNGcLXq4L3e1Uy4H5Qq23PxzTThewHN1GPUUdn7rCqpIC1gF5/3cb1VhWzawInbI9auendkApwPbA13KPIimnp/3BPkGg6ivSDDVNN3Z9ADdTzgnv23r7X3VN09Vq5i/r+p+v9/t696ZoRL2n2vBXdmXO7cFCSiXeMpV903rxhHQ54hznLvhXaCgv8DZzPQS3rJ344O+wVhHoMH6YgBfuDbP4QVlJYzSkqN7Kju9swJSHcOpHYSfXjsMq8hgFdxc1VVQDZDQ1yKWMVW665ySICgm49in3IQbixPDD1cCfdDrLT++Rw4Wh5Ex8TGgip8TGIH5ItjQrot/Ht/WMRcGsTUd+5/7GmBqIIYggX3IuVk7V3HeDR
h09TFnqAbKDxLoyxk/FIrEyp3xoGDCO4B5KAoM1aDb0jieWh1j+yGxH1LeJ6u2eaxcpwSVsmnT3UN9qJAzAzf0uNH4HHHoQXDyxTYIpZn1iGLsxYYd0V1duZkKCEisM4An0c8FQVMyC8sSMtVAeSSY2/owKVTFuUfC8dSHc1wN4HfeoDQoO/Yjy232xJJq3+oEI2MKI6PlJJLjh8TGrYwNmvZZHpBxWGls60ptXfk09ONftGpvPkQ2BYzth8R+SO2HjB0r05Tt8BNx2Bw49+MpEo/jpU9Rx+bYdklpm2M/2E7IxH5I7Qc+ELSNWpLQ2g/WsoRdyv5UEcQpBRcSDm5b6pdaOsUADQnnoubRztAvYX7I5jXbqjp2VfU4ZOJpdluPbuokSGC2Sc5/iAULrvcJW+7Uo0ECaGwp8IT0uSRwDzuHxOSZ3HBPNZ0PIKhPn/kFyhORAY7bhu3KsAsi9lQCBhYR26hg40TO9uhwv8MeBLqScItm1bYoOMH5Ns0yrnTXNzhHhy6YcgH+I3ha9FwUlBxjipiCi+TmwFzVnXm4H1yDYZYvudD6sXx0Yg5c8yKOwY/1J9RxCMYu6h/rXX2sD41TECwIbIqJXnsLpxxcoj3ABxxiYIaSsoPTKl91iwEXTblcQu+b7/AAgdiQcvETv9MYdhQu26xH31dNe4+RQcmUm0L3VbtHxWAOzPnffasiqVMK8JFy7nN/+lSqbjraQgqmHpvlPpQ1XpegKJBxM+6hKnf9w/kUgdtDvuxtWz05qRgoxi7pKn9Vmbg8ts1uV+7LKL91FhiohnBeP1exr9UaVe4ytw7giuz+VGXDW2ecoQ7DaigPp8N92aKRzsCMYUUTlW3uSgXp6A0ZaCkr/dQoE4Nrqc2phJmpOhOzy9wmsuvdlAPZpTCyH2L7IbEfWA8bX1ED1zToJJnNnVh3GSr4ODxVCSpOYA5qgdtT9mAf3wWLQ6ozbjKOz1iFgwiz7cjuZJNpAyPsEjdlD7Z/0q6+kf1g9z+S3UIp1EbtvbDbACIzLvAxUhlMB2x7+N6PVRArIRy+iHXdFi0RkDQ2V657FexRj0HkZMn6VB5LvFHN4CaPC576TKDcOolbBmJnxnn2Y/ln+ej2D4ROVpp4bNSq63QvBxMi58KfCgL3J+XN7ukFKBnzJfvPaEBz4Ajs5FFJ0MOpxAlZDoIWu71UEfpzqabF7cNHJ8BCiZ/z3F1zr99J4AoHUJsXbB64L++q7VkKmQNnZTWnfbkrvwinGGgsewoxFJNOMeDjbH49FFP/9k5J0Mec83JVskdOngMnzzkn3+v84MFZuHK42eGcfF92rmQPXDzmefijH9+xCbVTuABJGzJjm5tndiUqWI4IIS6CcmA0xWHOp8cqUMPgLknaoM6e45wff0jYCGlVhdh2KLbNSifpc9oKWUWksDsTYZdfOakdrBfphrhdgRt2jp1DdUTHUDncKvHFTipj0rIoyprgSsw1Fb5MAK4jwOUle0J5aNqnCifhBQh9rGR2aE6dSfqGnFENrtt0R1Rl6rDPwoVOB7cOVoMVqdUz85FmfUF6JNWmA3JSt+wH6/8ysR/YpJuWouEaIFhJmToKgaKMYE957WvYYUF4EGDzGcEulOa5kTD7grSzIfmoH6OISAfrVsF19Vg7zi3gpBDs3kCVas1j//dV/9C4uTM83xOsp6gqnAGGsYlNko4NVloKUKzgi7X96f6EloICRDI2jDpv/IGTEa7O7NkeMYuhkinYFv+uGnuWkMIjWMFu/Vq1AWqctRLKymwC25ZP2+a+Qo2Fs8VX8Nh0KFjBFMRXsuuqs8JwqrHtNf7n9BMMLUuJ47euB4LeRmybTfGjO2ucJc3GqkDLjzWxVYDLA3tTRm3q0Ukv5EnYdTOyS2pm96/sdkDX6IwFPLqVXKhSzn2W8RaAg4Ll4NSdBSvQhYIb/K7cN6gY8DP2NOn8OkjqCCB2M28PbSQrrXVVe4sbDpy14LKgTqX4KtV2ZAn94B
ig/3DTuds1n0vHSwrATcFxo4udXXEqYADZcGtYdzw7EdHPrQEluWXBviQArv4wXbSSiMht3raxedukt9uUUlrBQMb2Q2I/sIJg91Q+1G66pp+iA5rOzanuqdqedRq2fcP6xJeur9x4CwYr4saqL/e1mxhv4Mqw4eZOX+3UUuYWhNOfPQ1WjTxqefeE5gJMSYVVr0QyxQybsbCz+UwshudncmOlkw3nqv1DiRiDi8eG47pvjke3I7CYzR2FPewXmd1qsOGln96BAx0Y+u9Yw7TbYE8HbVVEiIeHVOw9gn5+WwZMAGAmbecCF3PYe2wwtFtFU7KHAu7LCOAiDfcDrLSm3O3xbF7B9Z09pzs9tmWNI5h+ChQoy7nUqd2iKQIjH6ueDo/0B4sg6OA4TGzafurci5Xw7hd79elzXfXaqEPLuaM8cgHkSW1n3XF1DhTYA9QvdRTf4ttucO7Qx32/3aiU/Vjt6oMyev/b33//PyqFuD9x0gAA"; \ No newline at end of file diff --git a/docs/classes/Client.html b/docs/classes/Client.html index 5fda044..d80ce6e 100644 --- a/docs/classes/Client.html +++ b/docs/classes/Client.html @@ -1,4 +1,4 @@ -Client | predictionguard

Client provides access to make raw http calls.

+Client | predictionguard

Client provides access the PredictionGuard API.

Constructors

Properties

apiKey url @@ -19,70 +19,44 @@

Constructors

  • constructor constructs a Client API for use.

    Parameters

    • url: string

      url represents the transport and domain:port.

    • apiKey: string

      apiKey represents PG api key.

      -

    Returns Client

Properties

apiKey: string
url: string

Methods

  • Chat generates chat completions based on a conversation history.

    -

    Parameters

    • model: Models

      model represents the model to use for the -request.

      -
    • input: ChatInput[]

      input represents the conversation history -with roles (user, assistant) and messages.

      -
    • maxTokens: number

      maxTokens represents the maximum number -of tokens in the generated chat.

      -
    • temperature: number

      temperature represents the parameter -for controlling randomness in generated chat.

      +

    Returns Client

Properties

apiKey: string
url: string

Methods

  • Chat generates chat completions based on a conversation history.

    +

    Parameters

    • input: ChatInput

      input represents the entire set of +possible input for the Chat call.

    Returns Promise<[Chat, null | Error]>

    • A Promise with a Chat object and an Error object if the error is not null.
    -

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function Chat() {
    const model = pg.chat.Model.NeuralChat7B;
    const input = [
    {
    role: pg.Roles.User,
    content: 'How do you feel about the world in general',
    },
    ];
    const maxTokens = 1000;
    const temperature = 1.1;

    var [result, err] = await client.Chat(model, input, maxTokens, temperature);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log('RESULT:' + result.model + ': ' + result.choices[0].message.content);
    }

    Chat(); +

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function Chat() {
    const input = {
    model: pg.Models.NeuralChat7B,
    messages: [
    {
    role: pg.Roles.User,
    content: 'How do you feel about the world in general',
    },
    ],
    maxTokens: 1000,
    temperature: 0.1,
    topP: 0.1,
    options: {
    factuality: true,
    toxicity: true,
    pii: pg.PIIs.Replace,
    piiReplaceMethod: pg.ReplaceMethods.Random,
    },
    };

    var [result, err] = await client.Chat(input);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log('RESULT:' + result.createdDate() + ': ' + result.model + ': ' + result.choices[0].message.content);
    }

    Chat();
    -
  • ChatSSE generates a stream of chat completions based on a +

  • ChatSSE generates a stream of chat completions based on a conversation history.

    -

    Parameters

    • model: Models

      model represents the model to use for the -request.

      -
    • input: ChatInput[]

      input represents the conversation history -with roles (user, assistant) and messages.

      -
    • maxTokens: number

      maxTokens represents the maximum number -of tokens in the generated chat.

      -
    • temperature: number

      temperature represents the parameter -for controlling randomness in the generated chat.

      -
    • onMessage: ((event, err) => void)

      onMessage represents a function that will receive the stream of chat -results.

      -
        • (event, err): void
        • Parameters

          Returns void

    Returns Promise<null | Error>

      +

Parameters

  • input: ChatSSEInput

    input represents the entire set of +possible input for the SSE Chat call.

    +

Returns Promise<null | Error>

  • A Promise with an Error object if the error is not null.
-

Example

import * as pg from '../dist/index.js';

const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

async function ChatSSE() {
const model = pg.Models.NeuralChat7B;
const input = [
{
role: pg.Roles.User,
content: 'How do you feel about the world in general',
},
];
const maxTokens = 1000;
const temperature = 1.1;

const onMessage = function (event, err) {
if (err != null) {
if (err.error == 'EOF') {
return;
}
console.log(err);
}

for (const choice of event.choices) {
if (choice.delta.hasOwnProperty('content')) {
process.stdout.write(choice.delta.content);
}
}
};

var err = await client.ChatSSE(model, input, maxTokens, temperature, onMessage);
if (err != null) {
console.log('ERROR:' + err.error);
return;
}
}

ChatSSE(); +

Example

import * as pg from 'predictiongaurd';

const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

async function ChatSSE() {
const input = {
model: pg.Models.NeuralChat7B,
messages: [
{
role: pg.Roles.User,
content: 'How do you feel about the world in general',
},
],
maxTokens: 1000,
temperature: 0.1,
topP: 0.1,
onMessage: function (event, err) {
if (err != null) {
if (err.error == 'EOF') {
return;
}
console.log(err);
}

for (const choice of event.choices) {
if (choice.delta.hasOwnProperty('content')) {
process.stdout.write(choice.delta.content);
}
}
},
};

var err = await client.ChatSSE(input);
if (err != null) {
console.log('ERROR:' + err.error);
return;
}
}

ChatSSE();
-
  • ChatVision answers a question about an image.

    -

    Parameters

    • role: Roles

      role represents the role of the person asking -the question.

      -
    • question: string

      question represents the question being -asked.

      -
    • image: Base64Encoder

      image represents an object that can -produce a base64 encoding of an image.

      -
    • maxTokens: number

      maxTokens represents the maximum number -of tokens in the generated chat.

      -
    • temperature: number

      temperature represents the parameter -for controlling randomness in the generated chat.

      +
  • ChatVision answers a question about an image.

    +

    Parameters

    • input: ChatVisionInput

      input represents the entire set of +possible input for the Vision Chat call.

    Returns Promise<[ChatVision, null | Error]>

    • A Promise with a ChatVision object and an Error object if the error is not null.
    -

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function ChatVision() {
    const role = pg.Roles.User;
    const question = 'is there a deer in this picture';

    const image = new pg.ImageNetwork('https://pbs.twimg.com/profile_images/1571574401107169282/ylAgz_f5_400x400.jpg');
    // const file = new pg.ImageFile('/Users/bill/Documents/images/pGwOq5tz_400x400.jpg');

    const maxTokens = 300;
    const temperature = 0.1;

    var [result, err] = await client.ChatVision(role, question, image, maxTokens, temperature);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log('RESULT:' + result.createdDate() + ': ' + result.model + ': ' + result.choices[0].message.content);
    }

    ChatVision(); +

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function ChatVision() {
    const image = new pg.ImageNetwork('https://pbs.twimg.com/profile_images/1571574401107169282/ylAgz_f5_400x400.jpg');

    const input = {
    role: pg.Roles.User,
    question: 'is there a deer in this picture',
    image: image,
    maxTokens: 1000,
    temperature: 0.1,
    topP: 0.1,
    };

    var [result, err] = await client.ChatVision(input);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log('RESULT:' + result.createdDate() + ': ' + result.model + ': ' + result.choices[0].message.content);
    }

    ChatVision();
    -
  • Completion generates text completions based on the provided input.

    -

    Parameters

    • model: Models

      model represents the model to use for the -request.

      -
    • maxTokens: number

      maxTokens represents the maximum number -of tokens in the generated chat.

      -
    • temperature: number

      temperature represents the parameter -for controlling randomness in generated chat.

      -
    • prompt: string

    Returns Promise<[Completion, null | Error]>

      +
  • Completion generates text completions based on the provided input.

    +

    Parameters

    • input: CompletionInput

      input represents the entire set of +possible input for the Completion call.

      +

    Returns Promise<[Completion, null | Error]>

    • A Promise with a Completion object and an Error object if the error is not null.
    -

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function Completions() {
    const model = pg.Models.NeuralChat7B;
    const maxTokens = 1000;
    const temperature = 1.1;
    const prompt = 'Will I lose my hair';

    var [result, err] = await client.Completion(model, maxTokens, temperature, prompt);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log('RESULT:' + result.choices[0].text);
    }

    Completions(); +

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function Completions() {
    const input = {
    model: pg.Models.NeuralChat7B,
    prompt: 'Will I lose my hair',
    maxTokens: 1000,
    temperature: 0.1,
    topP: 0.1,
    };

    var [result, err] = await client.Completion(input);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log('RESULT:' + result.choices[0].text);
    }

    Completions();
    -
  • Embedding generates embedding vectors for the provided text and images.

    Parameters

    • input: EmbeddingInput[]

      input represents a collection of text and images to vectorize.

    Returns Promise<[Embedding, null | Error]>

      @@ -91,7 +65,7 @@

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function Embedding() {
    const image = new pg.ImageNetwork('https://pbs.twimg.com/profile_images/1571574401107169282/ylAgz_f5_400x400.jpg');

    const input = [
    {
    text: 'This is Bill Kennedy, a decent Go developer.',
    image: image,
    },
    ];

    var [result, err] = await client.Embedding(input);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    for (const dt of result.data) {
    process.stdout.write(dt.embedding.toString());
    }
    }

    Embedding();
    -
  • Factuality checks the factuality of a given text compared to a reference.

    +
  • Factuality checks the factuality of a given text compared to a reference.

    Parameters

    • reference: string

      reference represents the reference text for comparison.

    • text: string

      text represents the text to be checked @@ -102,14 +76,14 @@

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function Factuality() {
    const fact = `The President shall receive in full for his services during
    the term for which he shall have been elected compensation in the aggregate
    amount of 400,000 a year, to be paid monthly, and in addition an expense
    allowance of 50,000 to assist in defraying expenses relating to or resulting
    from the discharge of his official duties. Any unused amount of such expense
    allowance shall revert to the Treasury pursuant to section 1552 of title 31,
    United States Code. No amount of such expense allowance shall be included in
    the gross income of the President. He shall be entitled also to the use of
    the furniture and other effects belonging to the United States and kept in
    the Executive Residence at the White House.`;

    const text = `The president of the united states can take a salary of one
    million dollars`;

    var [result, err] = await client.Factuality(fact, text);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log('RESULT:' + JSON.stringify(result.checks[0]));
    }

    Factuality();
    -
  • HealthCheck validates the PG API Service is available.

    +
  • HealthCheck validates the PG API Service is available.

    Returns Promise<[string, null | Error]>

    • A Promise with a string and an Error object if the error is not null.

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function HealthCheck() {
    var [result, err] = await client.HealthCheck();
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log(result);
    }

    HealthCheck();
    -
  • Injection detects potential prompt injection attacks in a given prompt.

    Parameters

    • prompt: string

      prompt represents the text to detect injection attacks against.

    Returns Promise<[Injection, null | Error]>

      @@ -118,14 +92,14 @@

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function Injection() {
    const prompt = `A short poem may be a stylistic choice or it may be that you
    have said what you intended to say in a more concise way.`;

    var [result, err] = await client.Injection(prompt);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log('RESULT:' + result.checks[0].probability);
    }

    Injection();
    -
  • RawDoGet performs a raw GET call.

    +
  • RawDoGet performs a raw GET call.

    Parameters

    • endpoint: string

      endpoint represents endpoint to call and does not include the transport or domain.

    Returns Promise<[any, null | Error]>

    • A Promise with a response object and an error object if the error is not null.
    -
  • RawDoPost performs a raw POST call.

    +
  • RawDoPost performs a raw POST call.

    Parameters

    • endpoint: string

      endpoint represents endpoint to call and does not include the transport or domain.

    • body: any

      body represents an input object.

      @@ -133,7 +107,7 @@
    • A Promise with a response object and an error object if the error is not null.
    -
  • RawDoSSEPost performs a raw POST call with SSE support.

    +
  • RawDoSSEPost performs a raw POST call with SSE support.

    Parameters

    • endpoint: string

      endpoint represents endpoint to call and does not include the transport or domain.

    • body: any

      body represents an input object.

      @@ -142,7 +116,7 @@
        • (event, err): void
        • Parameters

          • event: null | ServerSentEvent
          • err: null | Error

          Returns void

    Returns Promise<null | Error>

    • A Promise with an error object if the error is not null.
    -
  • ReplacePI replaces personal information such as names, SSNs, and +

  • ReplacePI replaces personal information such as names, SSNs, and emails in a given text.

    Parameters

    • replaceMethod: ReplaceMethods

      replaceMethod represents the method to use for replacing personal information.

      @@ -154,7 +128,7 @@

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function ReplacePI() {
    const replaceMethod = pg.ReplaceMethods.Mask;
    const prompt = `My email is bill@ardanlabs.com and my number is 954-123-4567.`;

    var [result, err] = await client.ReplacePI(replaceMethod, prompt);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log('RESULT:' + result.checks[0].new_prompt);
    }

    ReplacePI();
    -
  • Toxicity checks the toxicity of a given text.

    Parameters

    • text: string

      text represents the text to be scored for toxicity.

    Returns Promise<[Toxicity, null | Error]>

      @@ -163,7 +137,7 @@

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function Toxicity() {
    const text = `Every flight I have is late and I am very angry. I want to
    hurt someone.`;

    var [result, err] = await client.Toxicity(text);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log('RESULT:' + result.checks[0].score);
    }

    Toxicity();
    -
  • Translate converts text from one language to another.

    +
  • Translate converts text from one language to another.

    Parameters

    • text: string

      text represents the text to be translated.

    • sourceLang: Languages

      sourceLang represents the source language of the text.

      @@ -175,4 +149,4 @@

    Example

    import * as pg from 'predictionguard';

    const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY);

    async function Translate() {
    const sourceLang = pg.Languages.English;
    const targetLang = pg.Languages.Spanish;
    const text = `The rain in Spain stays mainly in the plain`;

    var [result, err] = await client.Translate(text, sourceLang, targetLang);
    if (err != null) {
    console.log('ERROR:' + err.error);
    return;
    }

    console.log('RESULT:' + result.best_translation);
    }

    Translate();
    -
\ No newline at end of file +
\ No newline at end of file diff --git a/docs/enums/Languages.html b/docs/enums/Languages.html index fe8a881..289dcb7 100644 --- a/docs/enums/Languages.html +++ b/docs/enums/Languages.html @@ -1,5 +1,5 @@ Languages | predictionguard

Enumeration Languages

Languages represents the set of languages that can be used.

-

Enumeration Members

Enumeration Members

Enumeration Members

Afrikanns: "afr"
Amharic: "amh"
Arabic: "ara"
Armenian: "hye"
Azerbaijan: "aze"
Basque: "eus"
Belarusian: "bel"
Bengali: "ben"
Bosnian: "bos"
Catalan: "cat"
Chechen: "che"
Cherokee: "chr"
Chinese: "zho"
Croatian: "hrv"
Czech: "ces"
Danish: "dan"
Dutch: "nld"
English: "eng"
Estonian: "est"
Fijian: "fij"
Filipino: "fil"
Finnish: "fin"
French: "fra"
Galician: "glg"
Georgian: "kat"
German: "deu"
Greek: "ell"
Gujarati: "guj"
Haitian: "hat"
Hebrew: "heb"
Hindi: "hin"
Hungarian: "hun"
Icelandic: "isl"
Indonesian: "ind"
Irish: "gle"
Italian: "ita"
Japanese: "jpn"
Kannada: "kan"
Kazakh: "kaz"
Korean: "kor"
Latvian: "lav"
Lithuanian: "lit"
Macedonian: "mkd"
Malay1: "msa"
Malay2: "zlm"
Malayalam: "mal"
Maltese: "mlt"
Marathi: "mar"
Nepali: "nep"
Norwegian: "nor"
Persian: "fas"
Polish: "pol"
Portuguese: "por"
Romanian: "ron"
Russian: "rus"
Samoan: "smo"
Serbian: "srp"
Slavonic: "chu"
Slovak: "slk"
Slovenian: "slv"
Spanish: "spa"
Swahili: "swh"
Swedish: "swe"
Tamil: "tam"
Telugu: "tel"
Thai: "tha"
Turkish: "tur"
Ukrainian: "ukr"
Urdu: "urd"
Vietnamese: "vie"
Welsh: "cym"
\ No newline at end of file +

Enumeration Members

Afrikanns: "afr"
Amharic: "amh"
Arabic: "ara"
Armenian: "hye"
Azerbaijan: "aze"
Basque: "eus"
Belarusian: "bel"
Bengali: "ben"
Bosnian: "bos"
Catalan: "cat"
Chechen: "che"
Cherokee: "chr"
Chinese: "zho"
Croatian: "hrv"
Czech: "ces"
Danish: "dan"
Dutch: "nld"
English: "eng"
Estonian: "est"
Fijian: "fij"
Filipino: "fil"
Finnish: "fin"
French: "fra"
Galician: "glg"
Georgian: "kat"
German: "deu"
Greek: "ell"
Gujarati: "guj"
Haitian: "hat"
Hebrew: "heb"
Hindi: "hin"
Hungarian: "hun"
Icelandic: "isl"
Indonesian: "ind"
Irish: "gle"
Italian: "ita"
Japanese: "jpn"
Kannada: "kan"
Kazakh: "kaz"
Korean: "kor"
Latvian: "lav"
Lithuanian: "lit"
Macedonian: "mkd"
Malay1: "msa"
Malay2: "zlm"
Malayalam: "mal"
Maltese: "mlt"
Marathi: "mar"
Nepali: "nep"
Norwegian: "nor"
Persian: "fas"
Polish: "pol"
Portuguese: "por"
Romanian: "ron"
Russian: "rus"
Samoan: "smo"
Serbian: "srp"
Slavonic: "chu"
Slovak: "slk"
Slovenian: "slv"
Spanish: "spa"
Swahili: "swh"
Swedish: "swe"
Tamil: "tam"
Telugu: "tel"
Thai: "tha"
Turkish: "tur"
Ukrainian: "ukr"
Urdu: "urd"
Vietnamese: "vie"
Welsh: "cym"
\ No newline at end of file diff --git a/docs/enums/PIIs.html b/docs/enums/PIIs.html new file mode 100644 index 0000000..d2656a0 --- /dev/null +++ b/docs/enums/PIIs.html @@ -0,0 +1,4 @@ +PIIs | predictionguard

Enumeration PIIs

PIIs represents the set of pii options that can be used.

+

Enumeration Members

Enumeration Members

Block: "block"
Replace: "replace"
\ No newline at end of file diff --git a/docs/enums/ReplaceMethods.html b/docs/enums/ReplaceMethods.html index 6d35da0..486318a 100644 --- a/docs/enums/ReplaceMethods.html +++ b/docs/enums/ReplaceMethods.html @@ -1,6 +1,6 @@ ReplaceMethods | predictionguard

Enumeration ReplaceMethods

ReplaceMethods represents the set of replace methods that can be used.

-

Enumeration Members

Enumeration Members

Enumeration Members

Category: "category"
Fake: "fake"
Mask: "mask"
Random: "random"
\ No newline at end of file +

Enumeration Members

Category: "category"
Fake: "fake"
Mask: "mask"
Random: "random"
\ No newline at end of file diff --git a/docs/interfaces/Base64Encoder.html b/docs/interfaces/Base64Encoder.html index 4ac5114..aeef8cd 100644 --- a/docs/interfaces/Base64Encoder.html +++ b/docs/interfaces/Base64Encoder.html @@ -1,4 +1,4 @@ Base64Encoder | predictionguard

Interface Base64Encoder

Base64Encoder defines a method that can read a data source and returns a base64 encoded string.

-
interface Base64Encoder {
    EncodeBase64(): Promise<[string, null | Error]>;
}

Methods

Methods

\ No newline at end of file +
interface Base64Encoder {
    EncodeBase64(): Promise<[string, null | Error]>;
}

Methods

Methods

\ No newline at end of file diff --git a/docs/interfaces/Chat.html b/docs/interfaces/Chat.html index 5236ce7..fa66662 100644 --- a/docs/interfaces/Chat.html +++ b/docs/interfaces/Chat.html @@ -1,15 +1,15 @@ Chat | predictionguard

Interface Chat

Chat represents an object that contains the result for the chat call.

-
interface Chat {
    choices: ChatChoice[];
    created: number;
    id: string;
    model: Models;
    object: string;
    createdDate(): Date;
}

Properties

interface Chat {
    choices: ChatChoice[];
    created: number;
    id: string;
    model: Models;
    object: string;
    createdDate(): Date;
}

Properties

choices: ChatChoice[]

choices represents the collection of choices to choose from.

-
created: number

created represents the unix timestamp for when the request was +

created: number

created represents the unix timestamp for when the request was received.

-
id: string

id represents a unique identifier for the result.

-
model: Models

model represents the model used for generating the result.

-
object: string

object represents the type of the result document.

-

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    -

    Returns Date

\ No newline at end of file +
id: string

id represents a unique identifier for the result.

+
model: Models

model represents the model used for generating the result.

+
object: string

object represents the type of the result document.

+

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    +

    Returns Date

\ No newline at end of file diff --git a/docs/interfaces/ChatChoice.html b/docs/interfaces/ChatChoice.html index fda66a9..1ad0982 100644 --- a/docs/interfaces/ChatChoice.html +++ b/docs/interfaces/ChatChoice.html @@ -1,10 +1,10 @@ ChatChoice | predictionguard

Interface ChatChoice

ChatChoice represents an object that contains a result choice.

-
interface ChatChoice {
    index: number;
    message: ChatMessage;
    status: string;
}

Properties

interface ChatChoice {
    index: number;
    message: ChatMessage;
    status: string;
}

Properties

Properties

index: number

index represents the index position in the collection for this choice.

-
message: ChatMessage

message represents the message response for this choice.

-
status: string

status represents if the response for this choice was successful +

message: ChatMessage

message represents the message response for this choice.

+
status: string

status represents if the response for this choice was successful or not.

-
\ No newline at end of file +
\ No newline at end of file diff --git a/docs/interfaces/ChatInput.html b/docs/interfaces/ChatInput.html index 92278df..1d7d8d5 100644 --- a/docs/interfaces/ChatInput.html +++ b/docs/interfaces/ChatInput.html @@ -1,4 +1,14 @@ -ChatInput | predictionguard

Interface ChatInput

ChatInput represents a role and content related to a chat.

-
interface ChatInput {
    content: string;
    role: Roles;
}

Properties

Properties

content: string
role: Roles
\ No newline at end of file +ChatInput | predictionguard

Interface ChatInput

ChatInput represents the full potential input options for chat.

+
interface ChatInput {
    maxTokens: number;
    messages: ChatInputMessage[];
    model: Models;
    options: ChatInputOptions;
    temperature: number;
    topP: number;
}

Properties

maxTokens: number

maxTokens represents the max number of tokens to return.

+
messages: ChatInputMessage[]

messages represents the set of messages to process.

+
model: Models

model represents the model to use.

+

options represents a set of optional parameters.

+
temperature: number

temperature represents the randomness in GPT's output.

+
topP: number

topP represents the diversity of the generated text.

+
\ No newline at end of file diff --git a/docs/interfaces/ChatInputMessage.html b/docs/interfaces/ChatInputMessage.html new file mode 100644 index 0000000..ce7ff1d --- /dev/null +++ b/docs/interfaces/ChatInputMessage.html @@ -0,0 +1,6 @@ +ChatInputMessage | predictionguard

Interface ChatInputMessage

ChatInputMessage represents a role and content related to a chat.

+
interface ChatInputMessage {
    content: string;
    role: Roles;
}

Properties

Properties

content: string

content represents the content of the message.

+
role: Roles

role represents the role of the sender (user or assistant).

+
\ No newline at end of file diff --git a/docs/interfaces/ChatInputOptions.html b/docs/interfaces/ChatInputOptions.html new file mode 100644 index 0000000..75f81c5 --- /dev/null +++ b/docs/interfaces/ChatInputOptions.html @@ -0,0 +1,14 @@ +ChatInputOptions | predictionguard

Interface ChatInputOptions

ChatInputOptions represents options for post and preprocessing the input.

+
interface ChatInputOptions {
    blockPromptInjection: boolean;
    factuality: boolean;
    pii: PIIs;
    piiReplaceMethod: ReplaceMethods;
    toxicity: boolean;
}

Properties

blockPromptInjection: boolean

blockPromptInjection represents the choice to run the +blockPromptInjection algorithm.

+
factuality: boolean

factuality represents the choice to run the factuality algorithm.

+
pii: PIIs

pii represents the choice to run the replace personal information algorithm and which one.

+
piiReplaceMethod: ReplaceMethods

piiReplaceMethod represents the method to use for PII.

+
toxicity: boolean

toxicity represents the choice to run the toxicity algorithm.

+
\ No newline at end of file diff --git a/docs/interfaces/ChatMessage.html b/docs/interfaces/ChatMessage.html index 5b02a7f..8df9420 100644 --- a/docs/interfaces/ChatMessage.html +++ b/docs/interfaces/ChatMessage.html @@ -1,9 +1,9 @@ ChatMessage | predictionguard

Interface ChatMessage

ChatMessage represents an object that contains the content and a role. It can be used for input and returned as part of the response.

-
interface ChatMessage {
    content: string;
    output: string;
    role: Roles;
}

Properties

interface ChatMessage {
    content: string;
    output: string;
    role: Roles;
}

Properties

Properties

content: string

content represents the content of the message.

-
output: string

output represents the output for this message.

-
role: Roles

role represents the role of the sender (user or assistant).

-
\ No newline at end of file +
output: string

output represents the output for this message.

+
role: Roles

role represents the role of the sender (user or assistant).

+
\ No newline at end of file diff --git a/docs/interfaces/ChatSSE.html b/docs/interfaces/ChatSSE.html index a348187..5277bdf 100644 --- a/docs/interfaces/ChatSSE.html +++ b/docs/interfaces/ChatSSE.html @@ -1,16 +1,16 @@ ChatSSE | predictionguard

Interface ChatSSE

ChatSSE represents an object that contains the result for the chatSSE call.

-
interface ChatSSE {
    choices: ChatSSEChoice[];
    created: number;
    id: string;
    model: Models;
    object: string;
    createdDate(): Date;
}

Properties

interface ChatSSE {
    choices: ChatSSEChoice[];
    created: number;
    id: string;
    model: Models;
    object: string;
    createdDate(): Date;
}

Properties

choices: ChatSSEChoice[]

choices represents the collection of choices to choose from.

-
created: number

created represents the unix timestamp for when the request was +

created: number

created represents the unix timestamp for when the request was received.

-
id: string

id represents a unique identifier for the result.

-
model: Models

model represents the model used for generating the result.

-
object: string

object represents the type of the result document.

-

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    -

    Returns Date

\ No newline at end of file +
id: string

id represents a unique identifier for the result.

+
model: Models

model represents the model used for generating the result.

+
object: string

object represents the type of the result document.

+

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    +

    Returns Date

\ No newline at end of file diff --git a/docs/interfaces/ChatSSEChoice.html b/docs/interfaces/ChatSSEChoice.html index dd9e46b..bf3ed44 100644 --- a/docs/interfaces/ChatSSEChoice.html +++ b/docs/interfaces/ChatSSEChoice.html @@ -1,15 +1,15 @@ ChatSSEChoice | predictionguard

Interface ChatSSEChoice

ChatSSEChoice represents an object that contains a result choice.

-
interface ChatSSEChoice {
    delta: ChatSSEDelta;
    finish_reason: string;
    generated_text: string;
    index: number;
    logprobs: number;
}

Properties

interface ChatSSEChoice {
    delta: ChatSSEDelta;
    finish_reason: string;
    generated_text: string;
    index: number;
    logprobs: number;
}

Properties

delta represents the partial content for this choice.

-
finish_reason: string

finish_reason represents the reason the response has finished +

finish_reason: string

finish_reason represents the reason the response has finished which is provided when this is the last choice.

-
generated_text: string

generated_text represents the final completed chat response which +

generated_text: string

generated_text represents the final completed chat response which is provided when this is the last choice.

-
index: number

index represents the index position in the collection for +

index: number

index represents the index position in the collection for this choice.

-
logprobs: number

logprobs represents the log probability of accuracy for this choice.

-
\ No newline at end of file +
logprobs: number

logprobs represents the log probability of accuracy for this choice.

+
\ No newline at end of file diff --git a/docs/interfaces/ChatSSEDelta.html b/docs/interfaces/ChatSSEDelta.html index 9331749..19a5e92 100644 --- a/docs/interfaces/ChatSSEDelta.html +++ b/docs/interfaces/ChatSSEDelta.html @@ -1,4 +1,4 @@ ChatSSEDelta | predictionguard

Interface ChatSSEDelta

ChatSSEDelta represents an object that contains the content.

-
interface ChatSSEDelta {
    content: string;
}

Properties

interface ChatSSEDelta {
    content: string;
}

Properties

Properties

content: string

content represents the partial content response for a choice.

-
\ No newline at end of file +
\ No newline at end of file diff --git a/docs/interfaces/ChatSSEInput.html b/docs/interfaces/ChatSSEInput.html new file mode 100644 index 0000000..431ea99 --- /dev/null +++ b/docs/interfaces/ChatSSEInput.html @@ -0,0 +1,14 @@ +ChatSSEInput | predictionguard

Interface ChatSSEInput

ChatSSEInput represents the full potential input options for SSE chat.

+
interface ChatSSEInput {
    maxTokens: number;
    messages: ChatInputMessage[];
    model: Models;
    onMessage: ((event, err) => void);
    temperature: number;
    topP: number;
}

Properties

maxTokens: number

maxTokens represents the max number of tokens to return.

+
messages: ChatInputMessage[]

messages represents the set of messages to process.

+
model: Models

model represents the model to use.

+
onMessage: ((event, err) => void)

onMessage represents a function that will receive the messages.

+

Type declaration

    • (event, err): void
    • Parameters

      Returns void

temperature: number

temperature represents the randomness in GPT's output.

+
topP: number

topP represents the diversity of the generated text.

+
\ No newline at end of file diff --git a/docs/interfaces/ChatVision.html b/docs/interfaces/ChatVision.html index 5bb9c9f..c74dc15 100644 --- a/docs/interfaces/ChatVision.html +++ b/docs/interfaces/ChatVision.html @@ -1,15 +1,15 @@ ChatVision | predictionguard

Interface ChatVision

ChatVision represents the result for the vision call.

-
interface ChatVision {
    choices: ChatVisionChoice[];
    created: number;
    id: string;
    model: Models;
    object: string;
    createdDate(): Date;
}

Properties

interface ChatVision {
    choices: ChatVisionChoice[];
    created: number;
    id: string;
    model: Models;
    object: string;
    createdDate(): Date;
}

Properties

choices: ChatVisionChoice[]

choices represents the collection of choices to choose from.

-
created: number

created represents the unix timestamp for when the request was +

created: number

created represents the unix timestamp for when the request was received.

-
id: string

id represents a unique identifier for the result.

-
model: Models

model represents the model used for generating the result.

-
object: string

object represents the type of the result document.

-

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    -

    Returns Date

\ No newline at end of file +
id: string

id represents a unique identifier for the result.

+
model: Models

model represents the model used for generating the result.

+
object: string

object represents the type of the result document.

+

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    +

    Returns Date

\ No newline at end of file diff --git a/docs/interfaces/ChatVisionChoice.html b/docs/interfaces/ChatVisionChoice.html index 2d9954c..696a680 100644 --- a/docs/interfaces/ChatVisionChoice.html +++ b/docs/interfaces/ChatVisionChoice.html @@ -1,10 +1,10 @@ ChatVisionChoice | predictionguard

Interface ChatVisionChoice

ChatVisionChoice represents a choice for the vision call.

-
interface ChatVisionChoice {
    index: number;
    message: ChatVisionMessage;
    status: string;
}

Properties

interface ChatVisionChoice {
    index: number;
    message: ChatVisionMessage;
    status: string;
}

Properties

Properties

index: number

index represents the index position in the collection for this choice.

-

message represents a response for this choice.

-
status: string

status represents if the response for this choice was successful +

message represents a response for this choice.

+
status: string

status represents if the response for this choice was successful or not.

-
\ No newline at end of file +
\ No newline at end of file diff --git a/docs/interfaces/ChatVisionInput.html b/docs/interfaces/ChatVisionInput.html new file mode 100644 index 0000000..afdc6e1 --- /dev/null +++ b/docs/interfaces/ChatVisionInput.html @@ -0,0 +1,14 @@ +ChatVisionInput | predictionguard

Interface ChatVisionInput

ChatVisionInput represents the full potential input options for Vision chat.

+
interface ChatVisionInput {
    image: Base64Encoder;
    maxTokens: number;
    question: string;
    role: Roles;
    temperature: number;
    topP: number;
}

Properties

image represents an object that knows how to retrieve an image.

+
maxTokens: number

maxTokens represents the max number of tokens to return.

+
question: string

question represents the question about the image.

+
role: Roles

role represents the role of the sender (user or assistant).

+
temperature: number

temperature represents the randomness in GPT's output.

+
topP: number

topP represents the diversity of the generated text.

+
\ No newline at end of file diff --git a/docs/interfaces/ChatVisionMessage.html b/docs/interfaces/ChatVisionMessage.html index 4f94508..b0ee79e 100644 --- a/docs/interfaces/ChatVisionMessage.html +++ b/docs/interfaces/ChatVisionMessage.html @@ -1,8 +1,8 @@ ChatVisionMessage | predictionguard

Interface ChatVisionMessage

ChatVisionMessage represents content for the vision call.

-
interface ChatVisionMessage {
    content: string;
    output: string;
    role: Roles;
}

Properties

interface ChatVisionMessage {
    content: string;
    output: string;
    role: Roles;
}

Properties

Properties

content: string

content represents the response for this message.

-
output: string

output represents the output for this message.

-
role: Roles

role represents the role of the sender (user or assistant).

-
\ No newline at end of file +
output: string

output represents the output for this message.

+
role: Roles

role represents the role of the sender (user or assistant).

+
\ No newline at end of file diff --git a/docs/interfaces/Completion.html b/docs/interfaces/Completion.html index a00fd81..e41d6af 100644 --- a/docs/interfaces/Completion.html +++ b/docs/interfaces/Completion.html @@ -1,14 +1,14 @@ Completion | predictionguard

Interface Completion

Completion represents an object that contains the result for the completion call.

-
interface Completion {
    choices: CompletionChoice[];
    created: number;
    id: string;
    object: string;
    createdDate(): Date;
}

Properties

interface Completion {
    choices: CompletionChoice[];
    created: number;
    id: string;
    object: string;
    createdDate(): Date;
}

Properties

Methods

Properties

choices: CompletionChoice[]

choices represents the collection of choices to choose from.

-
created: number

created represents the unix timestamp for when the request was +

created: number

created represents the unix timestamp for when the request was received.

-
id: string

id represents a unique identifier for the result.

-
object: string

object represent the type of the result document.

-

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    -

    Returns Date

\ No newline at end of file +
id: string

id represents a unique identifier for the result.

+
object: string

object represent the type of the result document.

+

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    +

    Returns Date

\ No newline at end of file diff --git a/docs/interfaces/CompletionChoice.html b/docs/interfaces/CompletionChoice.html index 2515e7b..88f0a82 100644 --- a/docs/interfaces/CompletionChoice.html +++ b/docs/interfaces/CompletionChoice.html @@ -1,13 +1,13 @@ CompletionChoice | predictionguard

Interface CompletionChoice

Choice represents an object that contains a result choice.

-
interface CompletionChoice {
    index: number;
    model: Models;
    status: string;
    text: string;
}

Properties

interface CompletionChoice {
    index: number;
    model: Models;
    status: string;
    text: string;
}

Properties

Properties

index: number

index represents the index position in the collection for this choice.

-
model: Models

model represents the model used for generating the result for +

model: Models

model represents the model used for generating the result for this choice.

-
status: string

status represents if the response for this choice was successful +

status: string

status represents if the response for this choice was successful or not.

-
text: string

text represents the generated text for this choice.

-
\ No newline at end of file +
text: string

text represents the generated text for this choice.

+
\ No newline at end of file diff --git a/docs/interfaces/CompletionInput.html b/docs/interfaces/CompletionInput.html new file mode 100644 index 0000000..bf01d29 --- /dev/null +++ b/docs/interfaces/CompletionInput.html @@ -0,0 +1,12 @@ +CompletionInput | predictionguard

Interface CompletionInput

CompletionInput represents the full potential input options for completion.

+
interface CompletionInput {
    maxTokens: number;
    model: Models;
    prompt: string;
    temperature: number;
    topP: number;
}

Properties

maxTokens: number

maxTokens represents the max number of tokens to return.

+
model: Models

model represents the model to use.

+
prompt: string

prompt represents the prompt to process.

+
temperature: number

temperature represents the randomness in GPT's output.

+
topP: number

topP represents the diversity of the generated text.

+
\ No newline at end of file diff --git a/docs/interfaces/Embedding.html b/docs/interfaces/Embedding.html index 171be34..e25d4f1 100644 --- a/docs/interfaces/Embedding.html +++ b/docs/interfaces/Embedding.html @@ -1,5 +1,5 @@ Embedding | predictionguard

Interface Embedding

Embedding represents the result for the embedding call.

-
interface Embedding {
    created: number;
    data: EmbeddingData[];
    id: string;
    model: Models;
    object: string;
    createdDate(): Date;
}

Properties

interface Embedding {
    created: number;
    data: EmbeddingData[];
    id: string;
    model: Models;
    object: string;
    createdDate(): Date;
}

Properties

created data id model @@ -7,9 +7,9 @@

Methods

Properties

created: number

created represents the unix timestamp for when the request was received.

-

EmbeddingData represents the collection of vector points.

-
id: string

id represents a unique identifier for the result.

-
model: Models

model represents the model used for generating the result.

-
object: string

object represent the type of the result document.

-

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    -

    Returns Date

\ No newline at end of file +

EmbeddingData represents the collection of vector points.

+
id: string

id represents a unique identifier for the result.

+
model: Models

model represents the model used for generating the result.

+
object: string

object represent the type of the result document.

+

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    +

    Returns Date

\ No newline at end of file diff --git a/docs/interfaces/EmbeddingData.html b/docs/interfaces/EmbeddingData.html index f9ecf90..64280d6 100644 --- a/docs/interfaces/EmbeddingData.html +++ b/docs/interfaces/EmbeddingData.html @@ -1,12 +1,12 @@ EmbeddingData | predictionguard

Interface EmbeddingData

EmbeddingData represents the vector data points.

-
interface EmbeddingData {
    embedding: number[];
    index: number;
    model: Models;
    status: string;
}

Properties

interface EmbeddingData {
    embedding: number[];
    index: number;
    model: Models;
    status: string;
}

Properties

embedding: number[]
index: number

index represents the index position in the collection for +

Properties

embedding: number[]
index: number

index represents the index position in the collection for this choice.

-
model: Models

model represents the model used for generating the result for +

model: Models

model represents the model used for generating the result for this choice.

-
status: string

status represents if the response for this choice was successful +

status: string

status represents if the response for this choice was successful or not.

-
\ No newline at end of file +
\ No newline at end of file diff --git a/docs/interfaces/EmbeddingInput.html b/docs/interfaces/EmbeddingInput.html index c6c49de..96eda25 100644 --- a/docs/interfaces/EmbeddingInput.html +++ b/docs/interfaces/EmbeddingInput.html @@ -1,6 +1,6 @@ EmbeddingInput | predictionguard

Interface EmbeddingInput

EmbeddingInput represents the input to generate embeddings.

-
interface EmbeddingInput {
    image: Base64Encoder;
    text: string;
}

Properties

interface EmbeddingInput {
    image: Base64Encoder;
    text: string;
}

Properties

Properties

image represents an image to vectorize.

-
text: string

text represents text to vectorize.

-
\ No newline at end of file +
text: string

text represents text to vectorize.

+
\ No newline at end of file diff --git a/docs/interfaces/Factuality.html b/docs/interfaces/Factuality.html index 433ee68..72e575e 100644 --- a/docs/interfaces/Factuality.html +++ b/docs/interfaces/Factuality.html @@ -1,14 +1,14 @@ Factuality | predictionguard

Interface Factuality

Factuality represents an object that contains the result for the factuality call.

-
interface Factuality {
    checks: FactualityCheck[];
    created: number;
    id: string;
    object: string;
    createdDate(): Date;
}

Properties

interface Factuality {
    checks: FactualityCheck[];
    created: number;
    id: string;
    object: string;
    createdDate(): Date;
}

Properties

Methods

Properties

checks: FactualityCheck[]

checks represents the collection of checks to choose from.

-
created: number

created represents the unix timestamp for when the request was +

created: number

created represents the unix timestamp for when the request was received.

-
id: string

id represents a unique identifier for the result.

-
object: string

object represent the type of the result document.

-

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    -

    Returns Date

\ No newline at end of file +
id: string

id represents a unique identifier for the result.

+
object: string

object represent the type of the result document.

+

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    +

    Returns Date

\ No newline at end of file diff --git a/docs/interfaces/FactualityCheck.html b/docs/interfaces/FactualityCheck.html index 97d80fa..4b9d785 100644 --- a/docs/interfaces/FactualityCheck.html +++ b/docs/interfaces/FactualityCheck.html @@ -1,9 +1,9 @@ FactualityCheck | predictionguard

Interface FactualityCheck

FactualityCheck represents an object that contains a check choice.

-
interface FactualityCheck {
    index: number;
    score: number;
    status: string;
}

Properties

interface FactualityCheck {
    index: number;
    score: number;
    status: string;
}

Properties

Properties

index: number

index represents the index position in the collection for this checks.

-
score: number

score represents the score for this check.

-
status: string

status represents the status for this check.

-
\ No newline at end of file +
score: number

score represents the score for this check.

+
status: string

status represents the status for this check.

+
\ No newline at end of file diff --git a/docs/interfaces/Injection.html b/docs/interfaces/Injection.html index 4c4c6ee..ad5e28b 100644 --- a/docs/interfaces/Injection.html +++ b/docs/interfaces/Injection.html @@ -1,14 +1,14 @@ Injection | predictionguard

Interface Injection

Injection represents an object that contains the result for the injection call.

-
interface Injection {
    checks: InjectionCheck[];
    created: number;
    id: string;
    object: string;
    createdDate(): Date;
}

Properties

interface Injection {
    checks: InjectionCheck[];
    created: number;
    id: string;
    object: string;
    createdDate(): Date;
}

Properties

Methods

Properties

checks: InjectionCheck[]

checks represents the collection of checks to choose from.

-
created: number

created represents the unix timestamp for when the result was +

created: number

created represents the unix timestamp for when the result was received.

-
id: string

id represents a unique identifier for the result.

-
object: string

object represent the type of the result document.

-

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    -

    Returns Date

\ No newline at end of file +
id: string

id represents a unique identifier for the result.

+
object: string

object represent the type of the result document.

+

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    +

    Returns Date

\ No newline at end of file diff --git a/docs/interfaces/InjectionCheck.html b/docs/interfaces/InjectionCheck.html index 87d4b8a..476e6b3 100644 --- a/docs/interfaces/InjectionCheck.html +++ b/docs/interfaces/InjectionCheck.html @@ -1,10 +1,10 @@ InjectionCheck | predictionguard

Interface InjectionCheck

InjectionCheck represents an object that contains a check choice.

-
interface InjectionCheck {
    index: number;
    probability: number;
    status: string;
}

Properties

interface InjectionCheck {
    index: number;
    probability: number;
    status: string;
}

Properties

index: number

index represents the index position in the collection for this checks.

-
probability: number

probability represents the probability of a potential injection +

probability: number

probability represents the probability of a potential injection attack.

-
status: string

status represents the status for this check.

-
\ No newline at end of file +
status: string

status represents the status for this check.

+
\ No newline at end of file diff --git a/docs/interfaces/ReplacePI.html b/docs/interfaces/ReplacePI.html index 7d7fe25..c090189 100644 --- a/docs/interfaces/ReplacePI.html +++ b/docs/interfaces/ReplacePI.html @@ -1,14 +1,14 @@ ReplacePI | predictionguard

Interface ReplacePI

ReplacePI represents an object that contains the result for the replacepi call.

-
interface ReplacePI {
    checks: ReplacePICheck[];
    created: number;
    id: string;
    object: string;
    createdDate(): Date;
}

Properties

interface ReplacePI {
    checks: ReplacePICheck[];
    created: number;
    id: string;
    object: string;
    createdDate(): Date;
}

Properties

Methods

Properties

checks: ReplacePICheck[]

checks represents the collection of checks to choose from.

-
created: number

created represents the unix timestamp for when the request was +

created: number

created represents the unix timestamp for when the request was received.

-
id: string

id represents a unique identifier for the result.

-
object: string

object represent the type of the result document.

-

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    -

    Returns Date

\ No newline at end of file +
id: string

id represents a unique identifier for the result.

+
object: string

object represent the type of the result document.

+

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    +

    Returns Date

\ No newline at end of file diff --git a/docs/interfaces/ReplacePICheck.html b/docs/interfaces/ReplacePICheck.html index ecbb037..f05ba95 100644 --- a/docs/interfaces/ReplacePICheck.html +++ b/docs/interfaces/ReplacePICheck.html @@ -1,9 +1,9 @@ ReplacePICheck | predictionguard

Interface ReplacePICheck

ReplacePICheck represents an object that contains a check choice.

-
interface ReplacePICheck {
    index: number;
    new_prompt: string;
    status: string;
}

Properties

interface ReplacePICheck {
    index: number;
    new_prompt: string;
    status: string;
}

Properties

index: number

index represents the index position in the collection for this checks.

-
new_prompt: string

new_prompt represents the text with replaced personal information.

-
status: string

status represents the status for this check.

-
\ No newline at end of file +
new_prompt: string

new_prompt represents the text with replaced personal information.

+
status: string

status represents the status for this check.

+
\ No newline at end of file diff --git a/docs/interfaces/Toxicity.html b/docs/interfaces/Toxicity.html index 10eb549..4cd1dca 100644 --- a/docs/interfaces/Toxicity.html +++ b/docs/interfaces/Toxicity.html @@ -1,14 +1,14 @@ Toxicity | predictionguard

Interface Toxicity

Toxicity represents an object that contains the result for the toxicity call.

-
interface Toxicity {
    checks: ToxicityCheck[];
    created: number;
    id: string;
    object: string;
    createdDate(): Date;
}

Properties

interface Toxicity {
    checks: ToxicityCheck[];
    created: number;
    id: string;
    object: string;
    createdDate(): Date;
}

Properties

Methods

Properties

checks: ToxicityCheck[]

checks represents the collection of checks to choose from.

-
created: number

created represents the unix timestamp for when the request was +

created: number

created represents the unix timestamp for when the request was received.

-
id: string

id represents a unique identifier for the result.

-
object: string

object represent the type of the result document.

-

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    -

    Returns Date

\ No newline at end of file +
id: string

id represents a unique identifier for the result.

+
object: string

object represent the type of the result document.

+

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    +

    Returns Date

\ No newline at end of file diff --git a/docs/interfaces/ToxicityCheck.html b/docs/interfaces/ToxicityCheck.html index 0c1d838..42c7cce 100644 --- a/docs/interfaces/ToxicityCheck.html +++ b/docs/interfaces/ToxicityCheck.html @@ -1,9 +1,9 @@ ToxicityCheck | predictionguard

Interface ToxicityCheck

ToxicityCheck represents an object that contains a check choice.

-
interface ToxicityCheck {
    index: number;
    score: number;
    status: string;
}

Properties

interface ToxicityCheck {
    index: number;
    score: number;
    status: string;
}

Properties

Properties

index: number

index represents the index position in the collection for this checks.

-
score: number

score represents the score for the provided text.

-
status: string

status represents the status for this check.

-
\ No newline at end of file +
score: number

score represents the score for the provided text.

+
status: string

status represents the status for this check.

+
\ No newline at end of file diff --git a/docs/interfaces/Translate.html b/docs/interfaces/Translate.html index 72a7cbd..da5e109 100644 --- a/docs/interfaces/Translate.html +++ b/docs/interfaces/Translate.html @@ -1,6 +1,6 @@ Translate | predictionguard

Interface Translate

Translate represents an object that contains the result for the translate call.

-
interface Translate {
    best_score: number;
    best_translation: string;
    best_translation_model: string;
    created: number;
    id: string;
    object: string;
    translations: Translation[];
    createdDate(): Date;
}

Properties

interface Translate {
    best_score: number;
    best_translation: string;
    best_translation_model: string;
    created: number;
    id: string;
    object: string;
    translations: Translation[];
    createdDate(): Date;
}

Properties

best_score: number

best_score represents the best score for the best translation.

-
best_translation: string

best_translation represents the best translation of the input text.

-
best_translation_model: string

best_translation_model represents the model used for the best +

best_translation: string

best_translation represents the best translation of the input text.

+
best_translation_model: string

best_translation_model represents the model used for the best translation.

-
created: number

created represents the unix timestamp for when the request was +

created: number

created represents the unix timestamp for when the request was received.

-
id: string

id represents a unique identifier for the result.

-
object: string

object represent the type of the result document.

-
translations: Translation[]

translations represents the collection of translations to choose from.

-

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    -

    Returns Date

\ No newline at end of file +
id: string

id represents a unique identifier for the result.

+
object: string

object represent the type of the result document.

+
translations: Translation[]

translations represents the collection of translations to choose from.

+

Methods

  • createdDate converts the created unix timestamp into a JS Date.

    +

    Returns Date

\ No newline at end of file diff --git a/docs/interfaces/Translation.html b/docs/interfaces/Translation.html index 571135f..7a23d5a 100644 --- a/docs/interfaces/Translation.html +++ b/docs/interfaces/Translation.html @@ -1,10 +1,10 @@ Translation | predictionguard

Interface Translation

Translation represents an object that contains a translation choice.

-
interface Translation {
    model: string;
    score: number;
    status: string;
    translation: string;
}

Properties

interface Translation {
    model: string;
    score: number;
    status: string;
    translation: string;
}

Properties

model: string

model represents the model that was used for this translation.

-
score: number

score represents the quality score for this translation.

-
status: string

status represents the status of using the model for this translation.

-
translation: string

translation represents the translation.

-
\ No newline at end of file +
score: number

score represents the quality score for this translation.

+
status: string

status represents the status of using the model for this translation.

+
translation: string

translation represents the translation.

+
\ No newline at end of file diff --git a/docs/modules.html b/docs/modules.html index f28e84a..04cfd96 100644 --- a/docs/modules.html +++ b/docs/modules.html @@ -1,5 +1,6 @@ predictionguard

predictionguard

Index

Enumerations

Classes

Client @@ -9,15 +10,20 @@ Chat ChatChoice ChatInput +ChatInputMessage +ChatInputOptions ChatMessage ChatSSE ChatSSEChoice ChatSSEDelta +ChatSSEInput ChatVision ChatVisionChoice +ChatVisionInput ChatVisionMessage Completion CompletionChoice +CompletionInput Embedding EmbeddingData EmbeddingInput diff --git a/examples/chat.js b/examples/chat.js index 5856817..200ab00 100644 --- a/examples/chat.js +++ b/examples/chat.js @@ -3,17 +3,26 @@ import * as pg from '../dist/index.js'; const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY); async function Chat() { - const model = pg.Models.NeuralChat7B; - const input = [ - { - role: pg.Roles.User, - content: 'How do you feel about the world in general', + const input = { + model: pg.Models.NeuralChat7B, + messages: [ + { + role: pg.Roles.User, + content: 'How do you feel about the world in general', + }, + ], + maxTokens: 1000, + temperature: 0.1, + topP: 0.1, + options: { + factuality: true, + toxicity: true, + pii: pg.PIIs.Replace, + piiReplaceMethod: pg.ReplaceMethods.Random, }, - ]; - const maxTokens = 1000; - const temperature = 1.1; + }; - var [result, err] = await client.Chat(model, input, maxTokens, temperature); + var [result, err] = await client.Chat(input); if (err != null) { console.log('ERROR:' + err.error); return; diff --git a/examples/chat_sse.js b/examples/chat_sse.js index f4d9d96..e394ef5 100644 --- a/examples/chat_sse.js +++ b/examples/chat_sse.js @@ -3,32 +3,34 @@ import * as pg from '../dist/index.js'; const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY); async function ChatSSE() { - const model = pg.Models.NeuralChat7B; - const input = [ - { - role: pg.Roles.User, - content: 'How do you feel about the world in general', - }, - ]; - const maxTokens = 1000; - const temperature = 1.1; - - const onMessage = function (event, err) { - if (err != null) { - if (err.error == 'EOF') { - return; + const input = { + model: pg.Models.NeuralChat7B, + messages: 
[ + { + role: pg.Roles.User, + content: 'How do you feel about the world in general', + }, + ], + maxTokens: 1000, + temperature: 0.1, + topP: 0.1, + onMessage: function (event, err) { + if (err != null) { + if (err.error == 'EOF') { + return; + } + console.log(err); } - console.log(err); - } - for (const choice of event.choices) { - if (choice.delta.hasOwnProperty('content')) { - process.stdout.write(choice.delta.content); + for (const choice of event.choices) { + if (choice.delta.hasOwnProperty('content')) { + process.stdout.write(choice.delta.content); + } } - } + }, }; - var err = await client.ChatSSE(model, input, maxTokens, temperature, onMessage); + var err = await client.ChatSSE(input); if (err != null) { console.log('ERROR:' + err.error); return; diff --git a/examples/chat_vision.js b/examples/chat_vision.js index 8518908..d901d6c 100644 --- a/examples/chat_vision.js +++ b/examples/chat_vision.js @@ -1,18 +1,20 @@ import * as pg from '../dist/index.js'; -const client = new pg.Client('https://staging.predictionguard.com', process.env.PGKEYSTAGE); +const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY); async function ChatVision() { - const role = pg.Roles.User; - const question = 'is there a deer in this picture'; - const image = new pg.ImageNetwork('https://pbs.twimg.com/profile_images/1571574401107169282/ylAgz_f5_400x400.jpg'); - // const file = new pg.ImageFile('/Users/bill/Documents/images/pGwOq5tz_400x400.jpg'); - const maxTokens = 300; - const temperature = 0.1; + const input = { + role: pg.Roles.User, + question: 'is there a deer in this picture', + image: image, + maxTokens: 1000, + temperature: 0.1, + topP: 0.1, + }; - var [result, err] = await client.ChatVision(role, question, image, maxTokens, temperature); + var [result, err] = await client.ChatVision(input); if (err != null) { console.log('ERROR:' + err.error); return; diff --git a/examples/completion.js b/examples/completion.js index 37f4155..9df05b3 100644 --- 
a/examples/completion.js +++ b/examples/completion.js @@ -3,12 +3,15 @@ import * as pg from '../dist/index.js'; const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY); async function Completions() { - const model = pg.Models.NeuralChat7B; - const maxTokens = 1000; - const temperature = 1.1; - const prompt = 'Will I lose my hair'; + const input = { + model: pg.Models.NeuralChat7B, + prompt: 'Will I lose my hair', + maxTokens: 1000, + temperature: 0.1, + topP: 0.1, + }; - var [result, err] = await client.Completion(model, maxTokens, temperature, prompt); + var [result, err] = await client.Completion(input); if (err != null) { console.log('ERROR:' + err.error); return; diff --git a/makefile b/makefile index f943829..634c220 100644 --- a/makefile +++ b/makefile @@ -47,7 +47,16 @@ curl-chat: } \ ], \ "max_tokens": 1000, \ - "temperature": 1.1 \ + "temperature": 1.1, \ + "top_p": 0.1, \ + "output": { \ + "factuality": true, \ + "toxicity": true \ + }, \ + "input": { \ + "pii": "replace", \ + "pii_replace_method": "random" \ + } \ }' js-chat: compile-ts @@ -109,7 +118,8 @@ curl-comp: "model": "Neural-Chat-7B", \ "prompt": "Will I lose my hair", \ "max_tokens": 1000, \ - "temperature": 1.1 \ + "temperature": 1.1, \ + "top_p": 0.1 \ }' js-comp: compile-ts diff --git a/package.json b/package.json index 89e6f1d..f7f0ff1 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "type": "module", "name": "predictionguard", "author": "Prediction Guard", - "version": "0.11.1", + "version": "0.12.0", "license": "Apache 2.0", "main": "./dist/index.js", "types": "./dist/index.d.ts", diff --git a/src/api_client.ts b/src/api_client.ts index dd9ad99..1183fb4 100644 --- a/src/api_client.ts +++ b/src/api_client.ts @@ -31,46 +31,49 @@ export class Client { * const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY); * * async function Chat() { - * const model = pg.chat.Model.NeuralChat7B; - * const input = [ - * { - * role: 
pg.Roles.User, - * content: 'How do you feel about the world in general', + * const input = { + * model: pg.Models.NeuralChat7B, + * messages: [ + * { + * role: pg.Roles.User, + * content: 'How do you feel about the world in general', + * }, + * ], + * maxTokens: 1000, + * temperature: 0.1, + * topP: 0.1, + * options: { + * factuality: true, + * toxicity: true, + * pii: pg.PIIs.Replace, + * piiReplaceMethod: pg.ReplaceMethods.Random, * }, - * ]; - * const maxTokens = 1000; - * const temperature = 1.1; + * }; * - * var [result, err] = await client.Chat(model, input, maxTokens, temperature); + * var [result, err] = await client.Chat(input); * if (err != null) { * console.log('ERROR:' + err.error); * return; * } * - * console.log('RESULT:' + result.model + ': ' + result.choices[0].message.content); + * console.log('RESULT:' + result.createdDate() + ': ' + result.model + ': ' + result.choices[0].message.content); * } * * Chat(); * ``` * - * @param {Model} model - model represents the model to use for the - * request. - * @param {ChatInput[]} input - input represents the conversation history - * with roles (user, assistant) and messages. - * @param {number} maxTokens - maxTokens represents the maximum number - * of tokens in the generated chat. - * @param {number} temperature - temperature represents the parameter - * for controlling randomness in generated chat. + * @param {model.ChatInput} input - input represents the entire set of + * possible input for the Chat call. * * @returns - A Promise with a Chat object and an Error object if * the error is not null. 
*/ - async Chat(model: model.Models, input: model.ChatInput[], maxTokens: number, temperature: number): Promise<[model.Chat, model.Error | null]> { + async Chat(input: model.ChatInput): Promise<[model.Chat, model.Error | null]> { const zero: model.Chat = { id: '', object: '', created: 0, - model: model, + model: input.model, choices: [], createdDate: function () { return new Date(0); @@ -78,12 +81,83 @@ export class Client { }; try { - const body = { - model: model, - max_tokens: maxTokens, - temperature: temperature, - messages: input, - }; + if (!input.hasOwnProperty('model')) { + return [zero, {error: 'model is a mandatory input'}]; + } + + if (!input.hasOwnProperty('messages')) { + return [zero, {error: 'messages is a mandatory input'}]; + } + + let maxTokens = 0; + if (input.hasOwnProperty('maxTokens')) { + maxTokens = input.maxTokens; + } + + let temperature = 0; + if (input.hasOwnProperty('temperature')) { + temperature = input.temperature; + } + + let topP = 0; + if (input.hasOwnProperty('topP')) { + topP = input.topP; + } + + const m = new Map(); + m.set('model', input.model); + m.set('messages', input.messages); + m.set('max_tokens', maxTokens); + m.set('temperature', temperature); + m.set('top_p', topP); + + if (input.hasOwnProperty('options')) { + if (input.options.hasOwnProperty('factuality') || input.options.hasOwnProperty('toxicity')) { + let factuality = false; + if (input.options.hasOwnProperty('factuality')) { + factuality = input.options.factuality; + } + + let toxicity = false; + if (input.options.hasOwnProperty('toxicity')) { + toxicity = input.options.toxicity; + } + + const output = { + factuality: factuality, + toxicity: toxicity, + }; + + m.set('output', output); + } + + if (input.options.hasOwnProperty('blockPromptInjection') || input.options.hasOwnProperty('pii') || input.options.hasOwnProperty('piiReplaceMethod')) { + let blockPromptInjection = false; + if (input.options.hasOwnProperty('blockPromptInjection')) { + blockPromptInjection = 
input.options.blockPromptInjection; + } + + let pii = ''; + if (input.options.hasOwnProperty('pii')) { + pii = input.options.pii; + } + + let replaceMethod = ''; + if (input.options.hasOwnProperty('piiReplaceMethod')) { + replaceMethod = input.options.piiReplaceMethod; + } + + const inp = { + block_prompt_injection: blockPromptInjection, + pii: pii, + pii_replace_method: replaceMethod, + }; + + m.set('input', inp); + } + } + + const body = Object.fromEntries(m.entries()); const [result, err] = await this.RawDoPost('chat/completions', body); if (err != null) { @@ -106,37 +180,39 @@ export class Client { * * @example * ``` - * import * as pg from 'predictionguard'; + * import * as pg from 'predictionguard'; * * const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY); * * async function ChatSSE() { - * const model = pg.Models.NeuralChat7B; - * const input = [ - * { - * role: pg.Roles.User, - * content: 'How do you feel about the world in general', - * }, - * ]; - * const maxTokens = 1000; - * const temperature = 1.1; - * - * const onMessage = function (event, err) { - * if (err != null) { - * if (err.error == 'EOF') { - * return; + * const input = { + * model: pg.Models.NeuralChat7B, + * messages: [ + * { + * role: pg.Roles.User, + * content: 'How do you feel about the world in general', + * }, + * ], + * maxTokens: 1000, + * temperature: 0.1, + * topP: 0.1, + * onMessage: function (event, err) { + * if (err != null) { + * if (err.error == 'EOF') { + * return; + * } + * console.log(err); * } - * console.log(err); - * } * - * for (const choice of event.choices) { - * if (choice.delta.hasOwnProperty('content')) { - * process.stdout.write(choice.delta.content); + * for (const choice of event.choices) { + * if (choice.delta.hasOwnProperty('content')) { + * process.stdout.write(choice.delta.content); + * } * } - * } + * }, * }; * - * var err = await client.ChatSSE(model, input, maxTokens, temperature, onMessage); + * var err = await
client.ChatSSE(input); * if (err != null) { * console.log('ERROR:' + err.error); * return; @@ -146,28 +222,47 @@ export class Client { * ChatSSE(); * ``` * - * @param {Model} model - model represents the model to use for the - * request. - * @param {ChatInput[]} input - input represents the conversation history - * with roles (user, assistant) and messages. - * @param {number} maxTokens - maxTokens represents the maximum number - * of tokens in the generated chat. - * @param {number} temperature - temperature represents the parameter - * for controlling randomness in the generated chat. - * @param {(event: ChatSSE | null, err: Error | null) => void} onMessage - - * onMessage represents a function that will receive the stream of chat - * results. + * @param {model.ChatSSEInput} input - input represents the entire set of + * possible input for the SSE Chat call. * * @returns - A Promise with an Error object if the error is not * null. */ - async ChatSSE(model: model.Models, input: model.ChatInput[], maxTokens: number, temperature: number, onMessage: (event: model.ChatSSE | null, err: model.Error | null) => void): Promise { + async ChatSSE(input: model.ChatSSEInput): Promise { try { + if (!input.hasOwnProperty('model')) { + return {error: 'model is a mandatory input'}; + } + + if (!input.hasOwnProperty('messages')) { + return {error: 'messages is a mandatory input'}; + } + + if (!input.hasOwnProperty('onMessage')) { + return {error: 'onMessage is a mandatory input'}; + } + + let maxTokens = 0; + if (input.hasOwnProperty('maxTokens')) { + maxTokens = input.maxTokens; + } + + let temperature = 0; + if (input.hasOwnProperty('temperature')) { + temperature = input.temperature; + } + + let topP = 0; + if (input.hasOwnProperty('topP')) { + topP = input.topP; + } + const body = { - model: model, + model: input.model, + messages: input.messages, max_tokens: maxTokens, temperature: temperature, - messages: input, + top_p: topP, stream: true, }; @@ -181,7 +276,7 @@ export class 
Client { return new Date(this.created * 1000); }; - onMessage(chatSSE, err); + input.onMessage(chatSSE, err); }; const err = await this.RawDoSSEPost('chat/completions', body, f); @@ -204,16 +299,18 @@ export class Client { * const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY); * * async function ChatVision() { - * const role = pg.Roles.User; - * const question = 'is there a deer in this picture'; - * * const image = new pg.ImageNetwork('https://pbs.twimg.com/profile_images/1571574401107169282/ylAgz_f5_400x400.jpg'); - * // const file = new pg.ImageFile('/Users/bill/Documents/images/pGwOq5tz_400x400.jpg'); * - * const maxTokens = 300; - * const temperature = 0.1; + * const input = { + * role: pg.Roles.User, + * question: 'is there a deer in this picture', + * image: image, + * maxTokens: 1000, + * temperature: 0.1, + * topP: 0.1, + * }; * - * var [result, err] = await client.ChatVision(role, question, image, maxTokens, temperature); + * var [result, err] = await client.ChatVision(input); * if (err != null) { * console.log('ERROR:' + err.error); * return; @@ -225,21 +322,13 @@ export class Client { * ChatVision(); * ``` * - * @param {Role} role - role represents the role of the person asking - * the question. - * @param {string} question - question represents the question being - * asked. - * @param {Base64Encoder} image - image represents an object that can - * produce a base64 encoding of an image. - * @param {number} maxTokens - maxTokens represents the maximum number - * of tokens in the generated chat. - * @param {number} temperature - temperature represents the parameter - * for controlling randomness in the generated chat. + * @param {model.ChatVisionInput} input - input represents the entire set of + * possible input for the Vision Chat call. * * @returns - A Promise with a ChatVision object and a Error * object if the error is not null. 
*/ - async ChatVision(role: model.Roles, question: string, image: model.Base64Encoder, maxTokens: number, temperature: number): Promise<[model.ChatVision, model.Error | null]> { + async ChatVision(input: model.ChatVisionInput): Promise<[model.ChatVision, model.Error | null]> { const zero: model.ChatVision = { id: '', object: '', @@ -252,7 +341,34 @@ export class Client { }; try { - const [b64, err1] = await image.EncodeBase64(); + if (!input.hasOwnProperty('role')) { + return [zero, {error: 'role is a mandatory input'}]; + } + + if (!input.hasOwnProperty('question')) { + return [zero, {error: 'question is a mandatory input'}]; + } + + if (!input.hasOwnProperty('image')) { + return [zero, {error: 'image is a mandatory input'}]; + } + + let maxTokens = 0; + if (input.hasOwnProperty('maxTokens')) { + maxTokens = input.maxTokens; + } + + let temperature = 0; + if (input.hasOwnProperty('temperature')) { + temperature = input.temperature; + } + + let topP = 0; + if (input.hasOwnProperty('topP')) { + topP = input.topP; + } + + const [b64, err1] = await input.image.EncodeBase64(); if (err1 != null) { return [zero, err1]; } @@ -261,11 +377,11 @@ export class Client { model: model.Models.Llava157BHF, messages: [ { - role: role, + role: input.role, content: [ { type: 'text', - text: question, + text: input.question, }, { type: 'image_url', @@ -278,6 +394,7 @@ export class Client { ], max_tokens: maxTokens, temperature: temperature, + top_p: topP, }; const [result, err2] = await this.RawDoPost('chat/completions', body); @@ -308,12 +425,15 @@ export class Client { * const client = new pg.Client('https://api.predictionguard.com', process.env.PGKEY); * * async function Completions() { - * const model = pg.Models.NeuralChat7B; - * const maxTokens = 1000; - * const temperature = 1.1; - * const prompt = 'Will I lose my hair'; + * const input = { + * model: pg.Models.NeuralChat7B, + * prompt: 'Will I lose my hair', + * maxTokens: 1000, + * temperature: 0.1, + * topP: 0.1, + * }; * - 
* var [result, err] = await client.Completion(model, maxTokens, temperature, prompt); + * var [result, err] = await client.Completion(input); * if (err != null) { * console.log('ERROR:' + err.error); * return; @@ -325,18 +445,13 @@ export class Client { * Completions(); * ``` * - * @param {Model} model - model represents the model to use for the - * request. - * @param {number} maxTokens - maxTokens represents the maximum number - * of tokens in the generated chat. - * @param {number} temperature - temperature represents the parameter - * for controlling randomness in generated chat. - * @param {number} temperature - prompt represents the chat input. + * @param {model.CompletionInput} input - input represents the entire set of + * possible input for the Completion call. * * @returns - A Promise with a Completion object and a Error * object if the error is not null. */ - async Completion(model: model.Models, maxTokens: number, temperature: number, prompt: string): Promise<[model.Completion, model.Error | null]> { + async Completion(input: model.CompletionInput): Promise<[model.Completion, model.Error | null]> { const zero: model.Completion = { id: '', object: '', @@ -348,11 +463,35 @@ export class Client { }; try { + if (!input.hasOwnProperty('model')) { + return [zero, {error: 'model is a mandatory input'}]; + } + + if (!input.hasOwnProperty('prompt')) { + return [zero, {error: 'prompt is a mandatory input'}]; + } + + let maxTokens = 0; + if (input.hasOwnProperty('maxTokens')) { + maxTokens = input.maxTokens; + } + + let temperature = 0; + if (input.hasOwnProperty('temperature')) { + temperature = input.temperature; + } + + let topP = 0; + if (input.hasOwnProperty('topP')) { + topP = input.topP; + } + const body = { - model: model, + model: input.model, + prompt: input.prompt, max_tokens: maxTokens, temperature: temperature, - prompt: prompt, + top_p: topP, }; const [result, err] = await this.RawDoPost('completions', body); @@ -406,7 +545,7 @@ export class Client { 
* Embedding(); * ``` * - * @param {EmbeddingInput[]} input - input represents a collection of + * @param {model.EmbeddingInput[]} input - input represents a collection of * text and images to vectorize. * * @returns - A Promise with a Embedding object and an Error object if @@ -678,7 +817,7 @@ export class Client { * ReplacePI(); * ``` * - * @param {ReplaceMethod} replaceMethod - replaceMethod represents the + * @param {model.ReplaceMethods} replaceMethod - replaceMethod represents the * method to use for replacing personal information. * @param {string} prompt - prompt represents the text to detect * injection attacks against. @@ -814,9 +953,9 @@ export class Client { * ``` * * @param {string} text - text represents the text to be translated. - * @param {Language} sourceLang - sourceLang represents the source + * @param {model.Languages} sourceLang - sourceLang represents the source * language of the text. - * @param {Language} targetLang - targetLang represents the target + * @param {model.Languages} targetLang - targetLang represents the target * language of the text. * * @returns - A Promise with a Translate object and a Error @@ -972,7 +1111,7 @@ export class Client { * @param {string} endpoint - endpoint represents endpoint to call and * does not include the transport or domain. * @param {any} body - body represents an input object. - * @param {(event: sse.ServerSentEvent | null, err: Error | null) => void} onMessage - + * @param {(event: sse.ServerSentEvent | null, err: model.Error | null) => void} onMessage - * onMessage represents a function that will receive the stream of chat * results. * diff --git a/src/api_model.ts b/src/api_model.ts index d512f9e..a3c0c81 100644 --- a/src/api_model.ts +++ b/src/api_model.ts @@ -26,6 +26,12 @@ export enum Roles { System = 'system', } +/** PIIs represents the set of pii options that can be used. 
*/ +export enum PIIs { + Block = 'block', + Replace = 'replace', +} + /** ReplaceMethods represents the set of replace methods that can be used. */ export enum ReplaceMethods { Random = 'random', @@ -119,12 +125,56 @@ export interface Base64Encoder { // ----------------------------------------------------------------------------- -/** ChatInput represents a role and content related to a chat. */ -export interface ChatInput { +/** ChatInputMessage represents a role and content related to a chat. */ +export interface ChatInputMessage { + /** role represents the role of the sender (user or assistant). */ role: Roles; + + /** content represents the content of the message. */ content: string; } +/** ChatInputOptions represents options for post and preprocessing the input. */ +export interface ChatInputOptions { + /** factuality represents the choice to run the factuality algorithm. */ + factuality: boolean; + + /** toxicity represents the choice to run the toxicity algorithm. */ + toxicity: boolean; + + /** blockPromptInjection represents the choice to run the + * blockPromptInjection algorithm. */ + blockPromptInjection: boolean; + + /** pii represents the choice to run the replace personal information + * algorithm and which one. */ + pii: PIIs; + + /** piiReplaceMethod represents the method to use for PII. */ + piiReplaceMethod: ReplaceMethods; +} + +/** ChatInput represents the full potential input options for chat. */ +export interface ChatInput { + /** model represents the model to use. */ + model: Models; + + /** messages represents the set of messages to process. */ + messages: ChatInputMessage[]; + + /** maxTokens represents the max number of tokens to return. */ + maxTokens: number; + + /** temperature represents the randomness in GPT's output. */ + temperature: number; + + /** topP represents the diversity of the generated text. */ + topP: number; + + /** options represents a set of optional parameters. 
*/ + options: ChatInputOptions; +} + /** ChatMessage represents an object that contains the content and a role. It * can be used for input and returned as part of the response. */ export interface ChatMessage { @@ -176,6 +226,27 @@ export interface Chat { // ----------------------------------------------------------------------------- +/** ChatSSEInput represents the full potential input options for SSE chat. */ +export interface ChatSSEInput { + /** model represents the model to use. */ + model: Models; + + /** messages represents the set of messages to process. */ + messages: ChatInputMessage[]; + + /** maxTokens represents the max number of tokens to return. */ + maxTokens: number; + + /** temperature represents the randomness in GPT's output. */ + temperature: number; + + /** topP represents the diversity of the generated text. */ + topP: number; + + /** onMessage represents a function that will receive the messages. */ + onMessage: (event: ChatSSE | null, err: Error | null) => void; +} + /** ChatSSEDelta represents an object that contains the content. */ export interface ChatSSEDelta { /** content represents the partial content response for a choice. */ @@ -228,6 +299,27 @@ export interface ChatSSE { // ----------------------------------------------------------------------------- +/** ChatVisionInput represents the full potential input options for Vision chat. */ +export interface ChatVisionInput { + /** role represents the role of the sender (user or assistant). */ + role: Roles; + + /** question represents the question about the image. */ + question: string; + + /** image represents an object that knows how to retrieve an image. */ + image: Base64Encoder; + + /** maxTokens represents the max number of tokens to return. */ + maxTokens: number; + + /** temperature represents the randomness in GPT's output. */ + temperature: number; + + /** topP represents the diversity of the generated text. 
*/ + topP: number; +} + /** ChatVisionMessage represents content for the vision call. */ export interface ChatVisionMessage { /** role represents the role of the sender (user or assistant). */ @@ -278,6 +370,24 @@ export interface ChatVision { // ----------------------------------------------------------------------------- +/** CompletionInput represents the full potential input options for completion. */ +export interface CompletionInput { + /** model represents the model to use. */ + model: Models; + + /** prompt represents the prompt to process. */ + prompt: string; + + /** maxTokens represents the max number of tokens to return. */ + maxTokens: number; + + /** temperature represents the randomness in GPT's output. */ + temperature: number; + + /** topP represents the diversity of the generated text. */ + topP: number; +} + /** Choice represents an object that contains a result choice. */ export interface CompletionChoice { /** index represents the index position in the collection for diff --git a/test/api_test.js b/test/api_test.js index 4212c8c..be34ff5 100644 --- a/test/api_test.js +++ b/test/api_test.js @@ -261,14 +261,26 @@ const chatVisionResp = { async function testChatBasic() { const client = new pg.Client('http://localhost:8080', 'any key'); - const messages = [ - { - role: pg.Roles.User, - content: 'How do you feel about the world in general', + const input = { + model: pg.Models.NeuralChat7B, + messages: [ + { + role: pg.Roles.User, + content: 'How do you feel about the world in general', + }, + ], + maxTokens: 1000, + temperature: 0.1, + topP: 0.1, + options: { + factuality: true, + toxicity: true, + pii: pg.PIIs.Replace, + piiReplaceMethod: pg.ReplaceMethods.Random, }, - ]; + }; - var [result, err] = await client.Chat(pg.Models.NeuralChat7B, 1000, 1.1, messages); + var [result, err] = await client.Chat(input); if (err != null) { assert.fail('ERROR:' + err.error); } @@ -288,12 +300,16 @@ async function testChatVision() { }, }; - const role = 
pg.Roles.User; - const question = 'is there a deer in this picture'; - const maxTokens = 300; - const temperature = 0.1; + const input = { + role: pg.Roles.User, + question: 'is there a deer in this picture', + image: imageMock, + maxTokens: 1000, + temperature: 0.1, + topP: 0.1, + }; - var [result, err] = await client.ChatVision(role, question, imageMock, maxTokens, temperature); + var [result, err] = await client.ChatVision(input); if (err != null) { assert.fail('ERROR:' + err.error); } @@ -307,14 +323,26 @@ async function testChatVision() { async function testChatBadkey() { const client = new pg.Client('http://localhost:8080', ''); - const messages = [ - { - role: pg.Roles.User, - content: 'How do you feel about the world in general', + const input = { + model: pg.Models.NeuralChat7B, + messages: [ + { + role: pg.Roles.User, + content: 'How do you feel about the world in general', + }, + ], + maxTokens: 1000, + temperature: 0.1, + topP: 0.1, + options: { + factuality: true, + toxicity: true, + pii: pg.PIIs.Replace, + piiReplaceMethod: pg.ReplaceMethods.Random, }, - ]; + }; - var [, err] = await client.Chat(pg.Models.NeuralChat7B, 1000, 1.1, messages); + var [, err] = await client.Chat(input); if (err == null) { assert.fail("didn't get an error"); } @@ -351,7 +379,15 @@ const completionResp = { async function testCompletionBasic() { const client = new pg.Client('http://localhost:8080', 'any key'); - var [result, err] = await client.Completion(pg.Models.NeuralChat7B, 1000, 1.0, 'Will I lose my hair'); + const input = { + model: pg.Models.NeuralChat7B, + prompt: 'Will I lose my hair', + maxTokens: 1000, + temperature: 0.1, + topP: 0.1, + }; + + var [result, err] = await client.Completion(input); if (err != null) { assert.fail('ERROR:' + err.error); } @@ -365,7 +401,15 @@ async function testCompletionBasic() { async function testCompletionBadkey() { const client = new pg.Client('http://localhost:8080', ''); - var [, err] = await 
client.Completion(pg.Models.NeuralChat7B, 1000, 1.0, 'Will I lose my hair'); + const input = { + model: pg.Models.NeuralChat7B, + prompt: 'Will I lose my hair', + maxTokens: 1000, + temperature: 0.1, + topP: 0.1, + }; + + var [, err] = await client.Completion(input); if (err == null) { assert.fail("didn't get an error"); }