Added docker setup

This commit is contained in:
Vasilije 2024-05-17 20:35:03 +02:00
parent 1ac28f4cb8
commit 7a7a857fdc
7 changed files with 4010 additions and 3668 deletions

View file

@@ -0,0 +1,20 @@
# Dev-mode image for the frontend: installs deps at build time for layer
# caching, then re-installs and starts the dev server at container start
# (the compose file bind-mounts ./cognee-frontend over /app, so node_modules
# from the build layer can be shadowed — hence the install in CMD).
# Use an official Node.js runtime as a parent image (pinned for reproducible builds)
FROM node:18.17.0
# Set the working directory to /app
WORKDIR /app
# Copy only the package manifests first so the npm-install layer is cached
# and only invalidated when dependencies change, not on every source edit
COPY package*.json ./
# Install any needed packages specified in package.json
RUN npm install
# Copy the rest of the application code to the working directory
COPY . .
# Document that the app listens on port 3000 (EXPOSE does not publish the
# port by itself; the compose file maps it with "3000:3000")
EXPOSE 3000
# Re-run npm install at startup (refreshes deps when the source tree is
# bind-mounted over /app), then launch the dev server
CMD ["sh", "-c", "npm install && npm run dev"]

File diff suppressed because it is too large Load diff

View file

@@ -37,7 +37,7 @@ export default function Home() {
}, [refreshDatasets]); }, [refreshDatasets]);
const openDatasetData = (dataset: { id: string }) => { const openDatasetData = (dataset: { id: string }) => {
fetch(`http://localhost:8000/datasets/${dataset.id}/data`) fetch(`http://0.0.0.0:8000/datasets/${dataset.id}/data`)
.then((response) => response.json()) .then((response) => response.json())
.then(setDatasetData) .then(setDatasetData)
.then(() => setSelectedDataset(dataset.id)); .then(() => setSelectedDataset(dataset.id));
@@ -56,7 +56,7 @@ export default function Home() {
const file = files[0]; const file = files[0];
formData.append('data', file, file.name); formData.append('data', file, file.name);
return fetch('http://localhost:8000/add', { return fetch('http://0.0.0.0:8000/add', {
method: 'POST', method: 'POST',
body: formData, body: formData,
}) })
@@ -88,7 +88,7 @@ export default function Home() {
const cognifyDataset = useCallback((dataset: { id: string }) => { const cognifyDataset = useCallback((dataset: { id: string }) => {
showNotification(`Cognification started for dataset "${dataset.id}".`, 5000); showNotification(`Cognification started for dataset "${dataset.id}".`, 5000);
return fetch('http://localhost:8000/cognify', { return fetch('http://0.0.0.0:8000/cognify', {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
@@ -120,7 +120,7 @@ export default function Home() {
}, [cognifyDataset, disableCognifyRun, enableCognifyRun]); }, [cognifyDataset, disableCognifyRun, enableCognifyRun]);
const deleteDataset = useCallback((dataset: { id: string }) => { const deleteDataset = useCallback((dataset: { id: string }) => {
fetch(`http://localhost:8000/datasets/${dataset.id}`, { fetch(`http://0.0.0.0:8000/datasets/${dataset.id}`, {
method: 'DELETE', method: 'DELETE',
}) })
.then(() => { .then(() => {
@@ -146,7 +146,7 @@ export default function Home() {
}, [showExplorationWindow]); }, [showExplorationWindow]);
const exploreDataset = useCallback((dataset: { id: string }) => { const exploreDataset = useCallback((dataset: { id: string }) => {
fetch(`http://localhost:8000/datasets/${dataset.id}/graph`) fetch(`http://0.0.0.0:8000/datasets/${dataset.id}/graph`)
.then((response) => response.text()) .then((response) => response.text())
.then((text) => text.replace('"', '')) .then((text) => text.replace('"', ''))
.then((graphUrl: string) => { .then((graphUrl: string) => {

View file

@@ -37,7 +37,7 @@ export default function DataView({ datasetId, data, onClose, onDataAdd }: DataVi
const showRawData = useCallback((dataItem: Data) => { const showRawData = useCallback((dataItem: Data) => {
setSelectedData(dataItem); setSelectedData(dataItem);
fetch(`http://localhost:8000/datasets/${datasetId}/data/${dataItem.id}/raw`) fetch(`http://0.0.0.0:8000/datasets/${datasetId}/data/${dataItem.id}/raw`)
.then((response) => response.arrayBuffer()) .then((response) => response.arrayBuffer())
.then(setRawData); .then(setRawData);
}, [datasetId]); }, [datasetId]);

View file

@@ -14,7 +14,7 @@ function useDatasets() {
const statusTimeout = useRef<any>(null); const statusTimeout = useRef<any>(null);
const fetchDatasetStatuses = useCallback((datasets: Dataset[]) => { const fetchDatasetStatuses = useCallback((datasets: Dataset[]) => {
fetch(`http://localhost:8000/datasets/status?dataset=${datasets.map(d => d.id).join('&dataset=')}`) fetch(`http://0.0.0.0:8000/datasets/status?dataset=${datasets.map(d => d.id).join('&dataset=')}`)
.then((response) => response.json()) .then((response) => response.json())
.then((statuses) => setDatasets( .then((statuses) => setDatasets(
(datasets) => ( (datasets) => (
@@ -65,7 +65,7 @@ function useDatasets() {
}, []); }, []);
const fetchDatasets = useCallback(() => { const fetchDatasets = useCallback(() => {
fetch('http://localhost:8000/datasets') fetch('http://0.0.0.0:8000/datasets')
.then((response) => response.json()) .then((response) => response.json())
.then((datasets) => datasets.map((dataset: string) => ({ id: dataset, name: dataset }))) .then((datasets) => datasets.map((dataset: string) => ({ id: dataset, name: dataset })))
.then((datasets) => { .then((datasets) => {

View file

@@ -37,11 +37,24 @@ services:
depends_on: depends_on:
- postgres - postgres
- neo4j - neo4j
- frontend
deploy: deploy:
resources: resources:
limits: limits:
cpus: "4.0" cpus: "4.0"
memory: 8GB memory: 8GB
frontend:
container_name: frontend
build:
context: ./cognee-frontend
dockerfile: Dockerfile
volumes:
- "./cognee-frontend:/app"
ports:
- "3000:3000"
networks:
- cognee_backend
postgres: postgres:
image: postgres image: postgres