Skip to content

Commit

Permalink
Grab bag of fixes that have been gathering
Browse files Browse the repository at this point in the history
  • Loading branch information
edfletcher committed Dec 4, 2023
1 parent 4945e4b commit 51b14ef
Show file tree
Hide file tree
Showing 8 changed files with 328 additions and 126 deletions.
21 changes: 19 additions & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,24 @@ ENV PATH=/usr/games:$PATH
USER drc
CMD ["node", "discord"]

FROM base as prometheus
FROM node:18-alpine as prometheus
WORKDIR /app/drc
COPY package*.json ./
RUN npm install
COPY *.js .
COPY lib ./lib/
COPY config/default.js ./config/
COPY config/local-prod.json ./config/
COPY config/channelXforms-prod.json ./config/
COPY http ./http/
COPY scripts ./scripts/
COPY prometheus.js .
RUN adduser -u 1001 -D drc
RUN chown -R drc /app/drc/scripts
ENV NODE_ENV=prod
ENV DRC_LOG_PATH=/logs
ENV DRC_IN_CONTAINER=1
ENV TZ="America/Los_Angeles"
STOPSIGNAL SIGINT
USER drc
CMD ["node", "prometheus"]
4 changes: 2 additions & 2 deletions cli/main.js
Original file line number Diff line number Diff line change
Expand Up @@ -380,12 +380,12 @@ module.exports = async function () {
msgTrack.totalInserted.bytes += Buffer.byteLength(insStr, 'utf8');
});
} else {
systemMessage(`(${_channel}) Unhandled message of type "${type}" in channel ${data.target} on ${data.__drcNetwork}: ${JSON.stringify(data)}`);
// systemMessage(`(${_channel}) Unhandled message of type "${type}" in channel ${data.target} on ${data.__drcNetwork}: ${JSON.stringify(data)}`);
}
} else {
const ignoreTypes = ['irc:join', 'irc:nick', 'irc:quit', 'irc:mode', 'irc:part'];
if (!ignoreTypes.includes(type)) {
systemMessage(`(${_channel}) Unhandled message of type "${type}" on ${data?.__drcNetwork}: ${dataJson}`);
// systemMessage(`(${_channel}) Unhandled message of type "${type}" on ${data?.__drcNetwork}: ${dataJson}`);
}
}
} catch (e) {
Expand Down
9 changes: 6 additions & 3 deletions config/default.js
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,8 @@ const SECRET_KEYS = [
'redis.url',
'shodan.apiKey',
'ipinfo.token',
'openai.secretKey'
'openai.secretKey',
'alpaca.apiKey'
];

function replace (obj, keys, replacement) {
Expand Down Expand Up @@ -203,7 +204,8 @@ const _config = {

openai: {
secretKey: null,
model: 'text-davinci-003',
organization: '',
model: 'gpt-3.5-turbo',
chatModel: 'gpt-3.5-turbo',
temperature: 0.9,
maxTokens: 3700,
Expand All @@ -224,7 +226,8 @@ const _config = {
hosts: {},
waitTimeSeconds: 17,
camelidaeFrontendAvailable: false,
defaultModel: ''
defaultModel: '',
apiKey: ''
},

prometheus: {
Expand Down
17 changes: 16 additions & 1 deletion discord/userCommands/alpaca.js
Original file line number Diff line number Diff line change
Expand Up @@ -36,9 +36,24 @@ async function waitForResponse (endpoint, promptId, queuePosition, model, logger

async function promptAndWait (prompt, endpoint, logger, options) {
const model = options?.model ?? config.defaultModel;
const mirostat = options?.mirostat ?? 0;
const headers = {};
let priority = 'NORMAL';

if (config.apiKey?.length) {
headers.Authorization = `Basic ${Buffer.from(`:${config.apiKey}`, 'utf8').toString('base64')}`;
priority = 'HIGH';
}

const promptRes = await fetch(`${endpoint}/prompt`, { // eslint-disable-line no-undef
method: 'POST',
body: JSON.stringify({ prompt, model })
headers,
body: JSON.stringify({
prompt,
model,
priority,
mirostat
})
});

if (!promptRes.ok) {
Expand Down
33 changes: 14 additions & 19 deletions discord/userCommands/gpt.js
Original file line number Diff line number Diff line change
@@ -1,15 +1,14 @@
'use strict';

const config = require('config').openai;
const oai = require('openai');
const OpenAI = require('openai');
const marked = require('marked');
const { fqUrlFromPath } = require('../../util');
const { servePage, isHTTPRunning } = require('../common');

require('../../logger')('discord');

const OAIAPI = new oai.OpenAIApi(new oai.Configuration({
apiKey: config.secretKey
}));
const OAIAPI = new OpenAI({ apiKey: config.secretKey, organization: config.organization });

async function f (context, ...a) {
if (!config.secretKey) {
Expand All @@ -33,18 +32,18 @@ async function f (context, ...a) {
context.sendToBotChan('Querying OpenAI...');

if (context.options.listModels) {
return (await OAIAPI.listModels())?.data?.data.map(({ id }) => id);
return (await OAIAPI.models.list())?.data?.map(({ id }) => id);
}

if (context.options.chat) {
delete dataObj.prompt;
dataObj.messages = [{ role: 'user', content: prompt }];
const res = await OAIAPI.createChatCompletion(dataObj);
dataObj.prompt = prompt; // createChatCompletion balks at it, but serverPage needs it
dataObj.response = res.data?.choices?.[0]?.message?.content ?? res.data;
} else {
const res = await OAIAPI.createCompletion(dataObj);
dataObj.response = res.data?.choices?.[0]?.text ?? res.data?.choices ?? res.data;
delete dataObj.prompt;
dataObj.messages = [{ role: 'user', content: prompt }];
const res = await OAIAPI.chat.completions.create(dataObj);
dataObj.prompt = prompt; // createChatCompletion balks at it, but servePage needs it
dataObj.response = res.choices?.[0]?.message?.content ?? res.data;

if (res.choices?.length > 1) {
context.sendToBotChan('Multiple responses!');
context.sendToBotChan(res.choices);
}

const queryTimeS = Number((new Date() - startTime) / 1000).toFixed(1);
Expand All @@ -53,11 +52,7 @@ async function f (context, ...a) {
const serveObj = {
...dataObj,
queryTimeS,
response: dataObj.response
.replaceAll(/^\s+/g, '')
.replaceAll('<', '&lt;')
.replaceAll('>', '&gt;')
.replaceAll('\n', '\n<br/>'),
response: marked.parse(dataObj.response),
viaHTML: config.viaHTML
};
if (!context.options.ttl) {
Expand Down
2 changes: 1 addition & 1 deletion docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ services:
restart: "on-failure"
logging:
options:
max-size: "512m"
max-size: "64m"
max-file: "10"

volumes:
Expand Down
Loading

0 comments on commit 51b14ef

Please sign in to comment.