diff --git a/Dockerfile b/Dockerfile index d206593..c59e496 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ FROM node:12.18.3 ## It is just Label when it has build automation. -LABEL version=0.1 +LABEL version=0.2.3 ## OS command for create a directory RUN mkdir /data diff --git a/api/.gitignore b/api/.gitignore new file mode 100644 index 0000000..06fff06 --- /dev/null +++ b/api/.gitignore @@ -0,0 +1,138 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# Mysql DB data +db_data + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# else +data_uploader.py + +.vscode +*.csv diff --git a/api/Dockerfile b/api/Dockerfile new file mode 100644 index 0000000..4aa4b0f --- /dev/null +++ b/api/Dockerfile @@ -0,0 +1,47 @@ +## Base image gets +FROM python:3.8 + +## It is just Label when it has build automation. +LABEL version=0.1 + +RUN apt-get update && apt-get install -y --no-install-recommends \ + netcat \ + && rm -rf /var/lib/apt/lists/* + +## OS command for create a directory +RUN mkdir /data + +## Define a base directory when it runs. +WORKDIR /data + +## File & Directory copy to WORKDIR +COPY requirements.txt /data/ +COPY manage.py /data/ +COPY exporterhub_schema.sql /data/ +COPY entrypoint.sh /data/ +COPY my_settings.py /data/ + +## Initializing for run +RUN pip install -r requirements.txt + +COPY exporterhub /data/exporterhub +COPY hub /data/hub + +## For test run +##ENTRYPOINT ["tail","-f","/data/package.json"] + +## You can define the environment variable if you have some configurations. +## For example, if you have seperated database server, you can make a configuration as below. 
+#ENV MYSQL_SERVER "mysql.test.com" +ENV PYTHONUNBUFFERED=1 +ENV APP_STAT=server + +## Make sure the port number for service expose +EXPOSE 8000 + +## ENTRYPOINT will be runs at the end of container attached +ENTRYPOINT /data/entrypoint.sh + + + + diff --git a/api/LICENSE b/api/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/api/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/api/README.md b/api/README.md new file mode 100644 index 0000000..778493e --- /dev/null +++ b/api/README.md @@ -0,0 +1,21 @@ +# exporterhub-be +Exporterhub Backend OpenSource. + + +# Kickstart +## Token Requires +* ___Create Token 1st before the App runs as below.___ + * https://github.com/settings/tokens/new +![Token Generator](assets/create_a_token_first_N.png) + +* And Input the generated Token to docker-compose.yml as below +``` + api: + image: nexclipper/exporterhub-be:0.1 + TOKEN: "TOKEN must be HERE!!!" +``` + +* Run the docker-compose as below +``` +docker-compose up -d +``` diff --git a/api/assets/README.md b/api/assets/README.md new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/api/assets/README.md @@ -0,0 +1 @@ + diff --git a/api/assets/create_a_token_first_N.png b/api/assets/create_a_token_first_N.png new file mode 100644 index 0000000..5747ffd Binary files /dev/null and b/api/assets/create_a_token_first_N.png differ diff --git a/api/entrypoint.sh b/api/entrypoint.sh new file mode 100755 index 0000000..5fd1611 --- /dev/null +++ b/api/entrypoint.sh @@ -0,0 +1,35 @@ +#!/bin/sh +parm=$APP_STAT +set -eu + +echo "Checking DB connection ..." + +i=0 +until [ $i -ge 10 ] +do + nc -z maria_db 3306 && break + + i=$(( i + 1 )) + + echo "$i: Waiting for DB 1 second ..." + sleep 1 +done + +if [ $i -eq 10 ] +then + echo "DB connection refused, terminating ..." + exit 1 +fi + +echo "DB is up ..." + + case $parm in + server) + python manage.py migrate + python manage.py runserver 0.0.0.0:8000 + ;; + scheduler) + python manage.py scheduler + ;; + esac + diff --git a/api/exporterhub/__init__.py b/api/exporterhub/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/exporterhub/asgi.py b/api/exporterhub/asgi.py new file mode 100644 index 0000000..18b53b6 --- /dev/null +++ b/api/exporterhub/asgi.py @@ -0,0 +1,16 @@ +""" +ASGI config for exporterhub project. + +It exposes the ASGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/ +""" + +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'exporterhub.settings') + +application = get_asgi_application() diff --git a/api/exporterhub/settings.py b/api/exporterhub/settings.py new file mode 100644 index 0000000..6c9a1ed --- /dev/null +++ b/api/exporterhub/settings.py @@ -0,0 +1,166 @@ +""" +Django settings for exporterhub project. + +Generated by 'django-admin startproject' using Django 3.1.1. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/3.1/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/3.1/ref/settings/ +""" + +from pathlib import Path +import my_settings +import os + +# Build paths inside the project like this: BASE_DIR / 'subdir'. +BASE_DIR = Path(__file__).resolve().parent.parent + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! +#SECRET_KEY = my_settings.SECRET_KEY +SECRET_KEY = os.environ["SECRET_KEY"] + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = False + +ALLOWED_HOSTS = ['*'] + + +# Application definition + +INSTALLED_APPS = [ + #'django.contrib.admin', + #'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'corsheaders', + 'hub', + 'django_apscheduler', +] + +MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + #'django.middleware.csrf.CsrfViewMiddleware', + #'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', + 'corsheaders.middleware.CorsMiddleware' +] + +ROOT_URLCONF = 'exporterhub.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'exporterhub.wsgi.application' + + +# Database +# https://docs.djangoproject.com/en/3.1/ref/settings/#databases + +DATABASES = my_settings.DATABASES + + +# Password validation +# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + + +# Internationalization +# https://docs.djangoproject.com/en/3.1/topics/i18n/ + +LANGUAGE_CODE = 'en-us' + +TIME_ZONE = 'Asia/Seoul' + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/3.1/howto/static-files/ + +STATIC_URL = '/static/' + +APPEND_SLASH = False + + +CORS_ORIGIN_ALLOW_ALL=True +CORS_ALLOW_CREDENTIALS = True + +CORS_ALLOW_METHODS = ( + 'DELETE', + 'GET', + 'OPTIONS', + 'PATCH', + 'POST', + 'PUT', +) + +CORS_ALLOW_HEADERS = ( + 'accept', + 'accept-encoding', + 'authorization', + 'content-type', + 'dnt', + 'origin', + 'user-agent', + 'x-csrftoken', + 'x-requested-with', + +) + +# Format string for displaying run time timestamps in the Django admin site. The default +# just adds seconds to the standard Django format, which is useful for displaying the timestamps +# for jobs that are scheduled to run on intervals of less than one minute. 
+# +# See https://docs.djangoproject.com/en/dev/ref/settings/#datetime-format for format string +# syntax details. +APSCHEDULER_DATETIME_FORMAT = "N j, Y, f:s a" + +# Maximum run time allowed for jobs that are triggered manually via the Django admin site, which +# prevents admin site HTTP requests from timing out. +# +# Longer running jobs should probably be handed over to a background task processing library +# that supports multiple background worker processes instead (e.g. Dramatiq, Celery, Django-RQ, +# etc. See: https://djangopackages.org/grids/g/workers-queues-tasks/ for popular options). +APSCHEDULER_RUN_NOW_TIMEOUT = 25 # Seconds diff --git a/api/exporterhub/urls.py b/api/exporterhub/urls.py new file mode 100644 index 0000000..f484571 --- /dev/null +++ b/api/exporterhub/urls.py @@ -0,0 +1,21 @@ +"""exporterhub URL Configuration + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/3.1/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" + +from django.urls import path, include + +urlpatterns = [ + path('', include('hub.urls')), +] diff --git a/api/exporterhub/wsgi.py b/api/exporterhub/wsgi.py new file mode 100644 index 0000000..7d48720 --- /dev/null +++ b/api/exporterhub/wsgi.py @@ -0,0 +1,16 @@ +""" +WSGI config for exporterhub project. + +It exposes the WSGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'exporterhub.settings') + +application = get_wsgi_application() diff --git a/api/exporterhub_schema.sql b/api/exporterhub_schema.sql new file mode 100644 index 0000000..6d09923 --- /dev/null +++ b/api/exporterhub_schema.sql @@ -0,0 +1,213 @@ +-- MariaDB dump 10.18 Distrib 10.4.17-MariaDB, for debian-linux-gnu (x86_64) +-- +-- Host: localhost Database: exporterhub +-- ------------------------------------------------------ +-- Server version 10.4.17-MariaDB-1:10.4.17+maria~bionic + +/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; +/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; +/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; +/*!40101 SET NAMES utf8mb4 */; +/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; +/*!40103 SET TIME_ZONE='+00:00' */; +/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; +/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; +/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; +/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; + +-- +-- Table structure for table `categories` +-- + +DROP TABLE IF EXISTS `categories`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `categories` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(200) NOT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=11 DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Table structure for table `django_apscheduler_djangojob` +-- + +DROP TABLE IF EXISTS `django_apscheduler_djangojob`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `django_apscheduler_djangojob` ( + `id` varchar(255) NOT NULL, + `next_run_time` datetime(6) DEFAULT NULL, + `job_state` longblob NOT NULL, + PRIMARY KEY (`id`), + KEY `django_apscheduler_djangojob_next_run_time_2f022619` (`next_run_time`) +) ENGINE=InnoDB DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Table structure for table `django_apscheduler_djangojobexecution` +-- + +DROP TABLE IF EXISTS `django_apscheduler_djangojobexecution`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `django_apscheduler_djangojobexecution` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `status` varchar(50) NOT NULL, + `run_time` datetime(6) NOT NULL, + `duration` decimal(15,2) DEFAULT NULL, + `finished` decimal(15,2) DEFAULT NULL, + `exception` varchar(1000) DEFAULT NULL, + `traceback` longtext DEFAULT NULL, + `job_id` varchar(255) NOT NULL, + PRIMARY KEY (`id`), + KEY `django_apscheduler_djangojobexecution_run_time_16edd96b` (`run_time`), + KEY `django_apscheduler_djangojobexecution_job_id_daf5090a_fk` (`job_id`), + CONSTRAINT `django_apscheduler_djangojobexecution_job_id_daf5090a_fk` FOREIGN KEY (`job_id`) REFERENCES `django_apscheduler_djangojob` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=49 DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Table structure for table `django_content_type` +-- + +DROP TABLE IF EXISTS `django_content_type`; +/*!40101 SET @saved_cs_client = 
@@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `django_content_type` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `app_label` varchar(100) NOT NULL, + `model` varchar(100) NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `django_content_type_app_label_model_76bd3d3b_uniq` (`app_label`,`model`) +) ENGINE=InnoDB AUTO_INCREMENT=10 DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Table structure for table `django_migrations` +-- + +DROP TABLE IF EXISTS `django_migrations`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `django_migrations` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `app` varchar(255) NOT NULL, + `name` varchar(255) NOT NULL, + `applied` datetime(6) NOT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=16 DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Table structure for table `django_session` +-- + +DROP TABLE IF EXISTS `django_session`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `django_session` ( + `session_key` varchar(40) NOT NULL, + `session_data` longtext NOT NULL, + `expire_date` datetime(6) NOT NULL, + PRIMARY KEY (`session_key`), + KEY `django_session_expire_date_a5c62663` (`expire_date`) +) ENGINE=InnoDB DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Table structure for table `exporters` +-- + +DROP TABLE IF EXISTS `exporters`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `exporters` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(200) NOT NULL, + `logo_url` varchar(2000) NOT NULL, + `stars` int(11) NOT NULL, + `repository_url` varchar(2000) NOT NULL, + `description` longtext NOT NULL, + `readme_url` varchar(2000) NOT NULL, + `readme` longblob NOT NULL, + `comment` longtext DEFAULT NULL, + `created_at` datetime(6) NOT NULL, + `modified_at` datetime(6) NOT NULL, + `category_id` int(11) NOT NULL, + `official_id` int(11) DEFAULT NULL, + PRIMARY KEY (`id`), + KEY `exporters_official_id_9940ac09_fk_officials_id` (`official_id`), + KEY `exporters_category_id_d7f8c1ea_fk_categories_id` (`category_id`), + CONSTRAINT `exporters_category_id_d7f8c1ea_fk_categories_id` FOREIGN KEY (`category_id`) REFERENCES `categories` (`id`), + CONSTRAINT `exporters_official_id_9940ac09_fk_officials_id` FOREIGN KEY (`official_id`) REFERENCES `officials` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=38 DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Table structure for table `logs` +-- + +DROP TABLE IF EXISTS `logs`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `logs` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `description` longtext NOT NULL, + `comment` longtext DEFAULT NULL, + `readme` longblob NOT NULL, + `created_at` datetime(6) NOT NULL, + `exporter_id` int(11) NOT NULL, + `official_id` int(11) DEFAULT NULL, + PRIMARY KEY (`id`), + KEY `logs_exporter_id_2df1ba34_fk_exporters_id` (`exporter_id`), + KEY `logs_official_id_33866889_fk_officials_id` (`official_id`), + CONSTRAINT `logs_exporter_id_2df1ba34_fk_exporters_id` FOREIGN KEY (`exporter_id`) REFERENCES `exporters` (`id`), + CONSTRAINT `logs_official_id_33866889_fk_officials_id` FOREIGN KEY (`official_id`) REFERENCES 
`officials` (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Table structure for table `officials` +-- + +DROP TABLE IF EXISTS `officials`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `officials` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `name` varchar(200) NOT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Table structure for table `releases` +-- + +DROP TABLE IF EXISTS `releases`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `releases` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `release_url` varchar(2000) NOT NULL, + `version` varchar(200) NOT NULL, + `date` datetime(6) NOT NULL, + `exporter_id` int(11) NOT NULL, + PRIMARY KEY (`id`), + KEY `releases_exporter_id_73aa308f_fk_exporters_id` (`exporter_id`), + CONSTRAINT `releases_exporter_id_73aa308f_fk_exporters_id` FOREIGN KEY (`exporter_id`) REFERENCES `exporters` (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=582 DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; +/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; + +/*!40101 SET SQL_MODE=@OLD_SQL_MODE */; +/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; +/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; +/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; +/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; +/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; +/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; + +-- Dump completed on 2020-12-02 10:54:01 diff --git a/api/hub/__init__.py b/api/hub/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/hub/admin.py b/api/hub/admin.py new file mode 100644 index 0000000..5538909 --- /dev/null +++ b/api/hub/admin.py @@ -0,0 +1,3 @@ +#from django.contrib import admin + +# Register your models here. 
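
The schema above creates an empty `categories` table, while the admin view added later in this patch (`hub/views/admin/repository.py`) maps category names to the fixed IDs 1-10. A minimal seeding sketch, not part of this patch and assuming those same ten names, might look like:

```python
# Hypothetical seeding helper (not included in this patch): creates the ten
# categories with IDs 1..10 so they line up with the hard-coded name-to-ID
# mapping used by hub/views/admin/repository.py.
from hub.models import Category

CATEGORY_NAMES = [
    "Database", "Hardware", "HTTP", "Library", "Logging",
    "Messaging", "Miscellaneous", "Monitoring", "Software", "Storage",
]

def seed_categories():
    for category_id, name in enumerate(CATEGORY_NAMES, start=1):
        # get_or_create keeps the call idempotent if some rows already exist.
        Category.objects.get_or_create(id=category_id, name=name)
```

Something like this would typically run once after `manage.py migrate`, for example from a data migration or a `manage.py shell` session.
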
diff --git a/api/hub/apps.py b/api/hub/apps.py new file mode 100644 index 0000000..8b03cfb --- /dev/null +++ b/api/hub/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class HubConfig(AppConfig): + name = 'hub' diff --git a/api/hub/management/__init__.py b/api/hub/management/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/hub/management/commands/__init__.py b/api/hub/management/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/hub/management/commands/scheduler.py b/api/hub/management/commands/scheduler.py new file mode 100644 index 0000000..6466fa8 --- /dev/null +++ b/api/hub/management/commands/scheduler.py @@ -0,0 +1,114 @@ +import requests +import base64 +import logging +import datetime +import time +import re + +from apscheduler.schedulers.blocking import BlockingScheduler +from apscheduler.triggers.cron import CronTrigger +from django_apscheduler.jobstores import DjangoJobStore +from django_apscheduler.models import DjangoJobExecution + +from django.conf import settings +from django.core.management.base import BaseCommand + +from hub.models import Exporter, Release +from my_settings import TOKEN + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) +file_handler = logging.FileHandler('update.log') +logger.addHandler(file_handler) + +api_url = 'https://api.github.com/repos/' +headers = {'Authorization':TOKEN} +PATTERN = r"!\[(\w*|\s|\w+( \w+)*)\]\(([^,:!]*|\/[^,:!]*\.\w+|\w*.\w*)\)" + + +def update_exporters(): + exporters = Exporter.objects.select_related('category', 'official').prefetch_related('release_set').order_by('id') + repo_urls = exporters.values_list('repository_url', flat=True) + + for repo_url in repo_urls: + repo_api_url = api_url+repo_url.replace('https://github.com/','') + readme_api_url = repo_api_url+'/readme' + release_api_url = repo_api_url+'/releases' + repository = requests.get(repo_api_url, headers=headers) + + if repository.status_code==200: + repo_data = repository.json() + readme = requests.get(readme_api_url, headers=headers) + readme_data = readme.json() + release = requests.get(release_api_url, headers=headers) + release_data = release.json()[0] if release.json() else [] + exporter = exporters.get(repository_url=repo_url) + new_readme = base64.b64decode(readme_data["content"]).decode('utf-8') + matches = re.findall(PATTERN, readme) + repo_name = repo_url.replace('https://github.com/','') + + for match in matches: + for element in match: + if '.' 
in element: + new_readme=new_readme.replace(element,f"https://raw.githubusercontent.com/{repo_name}/master/{element}") + + if str(exporter.modified_at) < repo_data['updated_at']: + Exporter.objects.filter(id=exporter.id).update( + stars = repo_data["stargazers_count"], + description = repo_data["description"], + readme = new_readme.encode('utf-8') + ) + logger.info(f'id: {exporter.id} name: {exporter.name} | SUCCESSFULLY_UPDATED_REPOSITORY_INFO | {datetime.datetime.now()}') + + if release_data and (str(exporter.release_set.last()) < release_data['created_at']): + Release.objects.create( + exporter_id=exporter.id, + date=release_data['created_at'], + version=release_data['tag_name'], + release_url=release_data['html_url'] + ) + logger.info(f'id: {exporter.id} name: {exporter.name} SUCCESSFULLY_UPDATED_RELEASE_INFO | {datetime.datetime.now()}') + + else: + logger.info(f'id: {exporter.id} name: {exporter.name} NO_CHANGES_MADE | {datetime.datetime.now()}') + else: + logger.error(f"id: {exporter.id} name: {exporter.name} ERROR_CHECK_REPOSITORY({repo_url}) | {datetime.datetime.now()}") + +def delete_old_job_executions(max_age=604_800): + """This job deletes all apscheduler job executions older than `max_age` from the database.""" + DjangoJobExecution.objects.delete_old_job_executions(max_age) + +class Command(BaseCommand): + help="Update exporters' GitHub repository information." + + def handle(self,*args, **options): + scheduler=BlockingScheduler(timezone=settings.TIME_ZONE) + scheduler.add_jobstore(DjangoJobStore(),'default') + + scheduler.add_job( + update_exporters, + trigger=CronTrigger(hour='23', minute='50'), + id='update_exporters', + max_instances=1, + replace_existing=True, + ) + logger.info("Added job 'update_exporters'.") + + scheduler.add_job( + delete_old_job_executions, + trigger=CronTrigger( + day_of_week="mon", hour="00", minute="00" + ), + id='delete_old_job_executions', + max_instances=1, + replace_existing=True, + ) + logger.info("Added weekly job 'delete_old_job_executions'.") + + try: + logger.info('Starting scheduler...') + scheduler.start() + except KeyboardInterrupt: + logger.info('Stopping scheduler...') + scheduler.shutdown() + logger.info('Scheduler shut down successfully.') \ No newline at end of file diff --git a/api/hub/migrations/0001_initial.py b/api/hub/migrations/0001_initial.py new file mode 100644 index 0000000..4864217 --- /dev/null +++ b/api/hub/migrations/0001_initial.py @@ -0,0 +1,88 @@ +# Generated by Django 3.1.3 on 2020-11-23 05:20 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='Category', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=200)), + ], + options={ + 'db_table': 'categories', + }, + ), + migrations.CreateModel( + name='Exporter', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=200)), + ('logo_url', models.URLField(max_length=2000)), + ('stars', models.IntegerField()), + ('repository_url', models.URLField(max_length=2000)), + ('description', models.TextField()), + ('readme_url', models.URLField(max_length=2000)), + ('readme', models.BinaryField()), + ('comment', models.TextField()), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('modified_at', 
models.DateTimeField(auto_now=True)), + ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hub.category')), + ], + options={ + 'db_table': 'exporters', + }, + ), + migrations.CreateModel( + name='Official', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=200)), + ], + options={ + 'db_table': 'officials', + }, + ), + migrations.CreateModel( + name='Release', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('release_url', models.URLField(max_length=2000)), + ('version', models.CharField(max_length=200)), + ('commit_date', models.DateTimeField()), + ('exporter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hub.exporter')), + ], + options={ + 'db_table': 'releases', + }, + ), + migrations.CreateModel( + name='Log', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('description', models.TextField()), + ('comment', models.TextField()), + ('readme', models.BinaryField()), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('exporter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hub.exporter')), + ('official', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='hub.official')), + ], + options={ + 'db_table': 'logs', + }, + ), + migrations.AddField( + model_name='exporter', + name='official', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='hub.official'), + ), + ] diff --git a/api/hub/migrations/0002_auto_20201123_1423.py b/api/hub/migrations/0002_auto_20201123_1423.py new file mode 100644 index 0000000..f656450 --- /dev/null +++ b/api/hub/migrations/0002_auto_20201123_1423.py @@ -0,0 +1,18 @@ +# Generated by Django 3.1.3 on 2020-11-23 05:23 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('hub', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='exporter', + name='comment', + field=models.TextField(null=True), + ), + ] diff --git a/api/hub/migrations/0003_auto_20201123_1524.py b/api/hub/migrations/0003_auto_20201123_1524.py new file mode 100644 index 0000000..682be10 --- /dev/null +++ b/api/hub/migrations/0003_auto_20201123_1524.py @@ -0,0 +1,18 @@ +# Generated by Django 3.1.3 on 2020-11-23 06:24 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('hub', '0002_auto_20201123_1423'), + ] + + operations = [ + migrations.RenameField( + model_name='release', + old_name='commit_date', + new_name='date', + ), + ] diff --git a/api/hub/migrations/0004_auto_20201123_1850.py b/api/hub/migrations/0004_auto_20201123_1850.py new file mode 100644 index 0000000..241bde8 --- /dev/null +++ b/api/hub/migrations/0004_auto_20201123_1850.py @@ -0,0 +1,18 @@ +# Generated by Django 3.1.3 on 2020-11-23 09:50 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('hub', '0003_auto_20201123_1524'), + ] + + operations = [ + migrations.AlterField( + model_name='log', + name='comment', + field=models.TextField(null=True), + ), + ] diff --git a/api/hub/migrations/__init__.py b/api/hub/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/hub/models.py b/api/hub/models.py new file mode 100644 index 0000000..b3c3451 --- 
/dev/null +++ b/api/hub/models.py @@ -0,0 +1,50 @@ +from django.db import models + +class Category(models.Model): + name = models.CharField(max_length=200) + + class Meta: + db_table='categories' + +class Official(models.Model): + name = models.CharField(max_length=200) + + class Meta: + db_table = 'officials' + +class Exporter(models.Model): + category = models.ForeignKey(Category, on_delete=models.CASCADE) + official = models.ForeignKey(Official, on_delete=models.SET_NULL, null=True) + name = models.CharField(max_length=200) + logo_url = models.URLField(max_length=2000) + stars = models.IntegerField() + repository_url = models.URLField(max_length=2000) + description = models.TextField() + readme_url = models.URLField(max_length=2000) + readme = models.BinaryField() + comment = models.TextField(null=True) + created_at = models.DateTimeField(auto_now_add=True) + modified_at = models.DateTimeField(auto_now=True) + + class Meta: + db_table = 'exporters' + +class Release(models.Model): + exporter = models.ForeignKey(Exporter, on_delete=models.CASCADE) + release_url = models.URLField(max_length=2000) + version = models.CharField(max_length=200) + date = models.DateTimeField() + + class Meta: + db_table = 'releases' + +class Log(models.Model): + exporter = models.ForeignKey(Exporter, on_delete=models.CASCADE) + official = models.ForeignKey(Official, on_delete=models.SET_NULL, null=True) + description = models.TextField() + comment = models.TextField(null=True) + readme = models.BinaryField() + created_at = models.DateTimeField(auto_now_add=True) + + class Meta: + db_table = 'logs' diff --git a/api/hub/tests.py b/api/hub/tests.py new file mode 100644 index 0000000..c9df50d --- /dev/null +++ b/api/hub/tests.py @@ -0,0 +1,271 @@ +import json + +from django.test import TestCase, Client + +from .models import Category, Official, Exporter, Release + +client=Client() + +class CategoryTest(TestCase): + def setUp(self): + Category.objects.create( + id=1, + name='category1' + ) + + def tearDown(self): + Category.objects.all().delete() + + def test_get_category_success(self): + response=client.get('/categories') + self.assertEqual(response.status_code,200) + self.assertEqual(response.json(), + { + 'categories':[{ + 'category_id':1, + 'category_name':'category1'}] + } + ) + + def test_get_category_not_found(self): + response=client.get('/category') + self.assertEqual(response.status_code,404) + +class MainPageTest(TestCase): + def setUp(self): + Category.objects.create( + id=1, + name='category' + ) + Official.objects.create( + id=1, + name='official' + ) + Exporter.objects.create( + id=1, + category_id=1, + official_id=1, + name='exporter', + logo_url='logo.com', + stars=1, + repository_url='repo.com', + description='hello world', + readme_url='readme.com', + readme=b'this is readme', + created_at='2020-11-27T15:00:00Z', + modified_at='2020-11-27T15:00:00Z' + ) + Release.objects.create( + id=1, + exporter_id=1, + release_url='release.com', + version='1.0', + date='2020-11-27T15:00:00Z' + ) + + def tearDown(self): + Category.objects.all().delete() + Official.objects.all().delete() + Exporter.objects.all().delete() + Release.objects.all().delete() + + def test_get_main_success(self): + response=client.get('/') + self.maxDiff = None + self.assertEqual(response.status_code,200) + self.assertEqual(response.json(), + {'exporters':[ + { + 'exporter_id':1, + 'name':'exporter', + 'logo_url':'logo.com', + 'category':'category', + 'official':'official', + 'stars':1, + 'repository_url':'repo.com', + 
'description':'hello world', + 'recent_release':'2020-11-27T15:00:00Z', + 'release':[{ + 'release_version':'1.0', + 'release_date':'2020-11-27T15:00:00Z', + 'release_url':'release.com', + }], + } + ] + } + ) + def test_get_main_not_found(self): + response=client.get('/main') + self.assertEqual(response.status_code,404) + +class DetailPageTest(TestCase): + def setUp(self): + Category.objects.create( + id=1, + name='category' + ) + Official.objects.create( + id=1, + name='official' + ) + Exporter.objects.create( + id=1, + category_id=1, + official_id=1, + name='exporter', + logo_url='logo.com', + stars=1, + repository_url='repo.com', + description='hello world', + readme_url='readme.com', + readme=b'this is readme', + created_at='2020-11-27T15:00:00Z', + modified_at='2020-11-27T15:00:00Z' + ) + + def tearDown(self): + Category.objects.all().delete() + Official.objects.all().delete() + Exporter.objects.all().delete() + + def test_get_exporter_detail_success(self): + response=client.get('/exporters/1') + self.assertEqual(response.status_code,200) + self.assertEqual(response.json(), + { + 'data':'this is readme' + } + ) + + def test_get_exporter_detail_fail(self): + response=client.get('/exporters/99999') + self.assertEqual(response.status_code,400) + self.assertEqual(response.json(), + { + 'message':'NO_EXPORTER' + } + ) + + def test_get_exporter_detail_not_found(self): + response=client.get('/exporters?id=1') + self.assertEqual(response.status_code,404) + +class RepositoryTest(TestCase): + def setUp(self): + Category.objects.create( + id=1, + name='Database' + ) + Official.objects.create( + id=1, + name='official' + ) + Official.objects.create( + id=2, + name='unofficial' + ) + Exporter.objects.create( + id=1, + category_id=1, + official_id=1, + name='exporter', + logo_url='logo.com', + stars=1, + repository_url='repo.com', + description='hello world', + readme_url='readme.com', + readme=b'this is readme', + created_at='2020-11-27T15:00:00Z', + modified_at='2020-11-27T15:00:00Z' + ) + Release.objects.create( + id=1, + exporter_id=1, + release_url='release.com', + version='1.0', + date='2020-11-27T15:00:00Z' + ) + + def tearDown(self): + Category.objects.all().delete() + Official.objects.all().delete() + Exporter.objects.all().delete() + Release.objects.all().delete() + + def test_post_repository_success(self): + exporter={ + 'repo_url':'https://github.com/prometheus-community/json_exporter', + 'category':'Database' + } + response=client.post('/exporter', data=exporter, content_type='application/json') + self.maxDiff = None + self.assertEqual(response.status_code,201) + self.assertEqual(response.json(), + { + 'message':'SUCCESS' + } + ) + + def test_post_repository_fail(self): + exporter={ + 'repo_url':'https://github.com/prometheus-community/doesnotexist', + 'category':'Database' + } + response=client.post('/exporter', data=exporter, content_type='application/json') + self.maxDiff = None + self.assertEqual(response.status_code,400) + self.assertEqual(response.json(), + { + 'message':'WRONG_REPOSITORY' + } + ) + + def test_post_repository_existing(self): + exporter={ + 'repo_url':'repo.com', + 'category':'Database' + } + response=client.post('/exporter', data=exporter, content_type='application/json') + self.maxDiff = None + self.assertEqual(response.status_code,400) + self.assertEqual(response.json(), + { + 'message':'EXISTING_REPOSITORY' + } + ) + + def test_post_repository_key_error(self): + exporter={ + 'repository_url':'https://github.com/prometheus-community/doesnotexist', + 
'category':'Database' + } + response=client.post('/exporter', data=exporter, content_type='application/json') + self.maxDiff = None + self.assertEqual(response.status_code,400) + self.assertEqual(response.json(), + { + 'message':'KEY_ERROR' + } + ) + + def test_delete_exporter_success(self): + response=client.delete('/exporter?exporter_id=1') + self.assertEqual(response.status_code,200) + + def test_delete_exporter_fail(self): + response=client.delete('/exporter?exporter_id=99999') + self.assertEqual(response.status_code,400) + self.assertEqual(response.json(), + { + 'message':'NO_EXPORTER' + } + ) + + def test_delete_exporter_key_error(self): + response=client.delete('/exporter?name=1') + self.assertEqual(response.status_code,400) + self.assertEqual(response.json(), + { + 'message':'KEY_ERROR' + } + ) diff --git a/api/hub/urls.py b/api/hub/urls.py new file mode 100644 index 0000000..e1ba8b7 --- /dev/null +++ b/api/hub/urls.py @@ -0,0 +1,13 @@ +from django.urls import path +from hub.views.admin.category import CategoryView +from hub.views.admin.repository import RepositoryView +from hub.views.public.exporters import MainView +from hub.views.public.exporter_detail import DetailView + + +urlpatterns = [ + path('', MainView.as_view()), + path('categories', CategoryView.as_view()), + path('exporter', RepositoryView.as_view()), + path('exporters/', DetailView.as_view()), +] diff --git a/api/hub/views/__init__.py b/api/hub/views/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/hub/views/admin/__init__.py b/api/hub/views/admin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/hub/views/admin/category.py b/api/hub/views/admin/category.py new file mode 100644 index 0000000..c38cdbd --- /dev/null +++ b/api/hub/views/admin/category.py @@ -0,0 +1,17 @@ +import json + +from django.views import View +from django.http import JsonResponse + +from hub.models import Category + +class CategoryView(View): + def get(self, request): + categories=Category.objects.all() + data={"categories": + [{ + "category_id" : category.id, + "category_name": category.name + } for category in categories] + } + return JsonResponse(data, status=200) \ No newline at end of file diff --git a/api/hub/views/admin/repository.py b/api/hub/views/admin/repository.py new file mode 100644 index 0000000..c991d06 --- /dev/null +++ b/api/hub/views/admin/repository.py @@ -0,0 +1,146 @@ +import json +import requests +import base64 +import re + +from django.views import View +from django.http import JsonResponse + +from hub.models import Exporter, Release +from my_settings import TOKEN + +api_url = 'https://api.github.com/repos/' +headers = {'Authorization' : 'token ' + TOKEN} +PATTERN = r"!\[(\w*|\s|\w+( \w+)*)\]\(([^,:!]*|\/[^,:!]*\.\w+|\w*.\w*)\)" + +categories={ + "Database" : 1, + "Hardware" : 2, + "HTTP" : 3, + "Library" : 4, + "Logging" : 5, + "Messaging" : 6, + "Miscellaneous": 7, + "Monitoring" : 8, + "Software" : 9, + "Storage" : 10 + } + +class RepositoryView(View): + def get_repo(self, repo_url): + if 'https://github.com/' not in repo_url: + return False + repo_api_url = api_url+repo_url.replace('https://github.com/','') + readme_api_url = repo_api_url+'/readme' + release_api_url = repo_api_url+'/releases' + repo = requests.get(repo_api_url, headers=headers) + + if repo.status_code==200: + repo_data = repo.json() + readme = requests.get(readme_api_url, headers=headers) + readme_data = readme.json() + release = requests.get(release_api_url, headers=headers) + release_data = 
release.json() + + data={ + "name" : repo_data["name"], + "logo_url" : repo_data["owner"]["avatar_url"], + "stars" : repo_data["stargazers_count"], + "description" : repo_data["description"], + "readme_url" : repo_url+"/blob/master/README.md", + "readme" : readme_data["content"], + "release" : [{ + "release_version": release["tag_name"], + "release_date" : release["created_at"], + "release_url" : release["html_url"] + } for release in release_data] + } + return data + return False + + def post(self, request): + try: + data = json.loads(request.body) + repo_url = data["repo_url"] + category = data["category"] + + if Exporter.objects.filter(repository_url=repo_url).exists(): + return JsonResponse({'message':'EXISTING_REPOSITORY'}, status=400) + + if "prometheus/" in repo_url: + official = 1 + else: + official = 2 + + repo_info = self.get_repo(repo_url) + + if repo_info: + readme = base64.b64decode(repo_info["readme"]).decode('utf-8') + matches = re.findall(PATTERN, readme) + repo_name = repo_url.replace('https://github.com/','') + + for match in matches: + for element in match: + if '.' in element: + readme=readme.replace(element,f"https://raw.githubusercontent.com/{repo_name}/master/{element}") + + exporter=Exporter.objects.create( + category_id = categories[category], + official_id = official, + name = repo_info["name"], + logo_url = repo_info["logo_url"], + stars = repo_info["stars"], + repository_url = repo_url, + description = repo_info["description"], + readme_url = repo_info["readme_url"], + readme = readme.encode('utf-8'), + ) + + release=sorted(repo_info["release"], key=lambda x: x["release_date"]) + + for info in release: + Release( + exporter_id = exporter.id, + release_url = info["release_url"], + version = info["release_version"], + date = info["release_date"] + ).save() + + return JsonResponse({'message':'SUCCESS'}, status=201) + + return JsonResponse({'message':'WRONG_REPOSITORY'}, status=400) + + except KeyError: + return JsonResponse({'message':'KEY_ERROR'}, status=400) + + def delete(self, request): + try: + exporter_id = request.GET['exporter_id'] + exporter = Exporter.objects.get(id=exporter_id) + release = Release.objects.filter(exporter_id=exporter_id) + if release.exists(): + release.delete() + exporter.delete() + + return JsonResponse({'message':'SUCCESS'}, status=200) + + except Exporter.DoesNotExist: + return JsonResponse({'message':'NO_EXPORTER'}, status=400) + except KeyError: + return JsonResponse({'message':'KEY_ERROR'}, status=400) + + def patch(self, request): + try: + exporter_id = request.GET['exporter_id'] + data = json.loads(request.body) + category = data['category'] + exporter = Exporter.objects.get(id=exporter_id) + exporter.category = categories[category] + exporter.save() + + return JsonResponse({'message':'SUCCESS'}, status=200) + + except Exporter.DoesNotExist: + return JsonResponse({'message':'NO_EXPORTER'}, status=400) + except KeyError: + return JsonResponse({'message':'KEY_ERROR'}, status=400) diff --git a/api/hub/views/private/__init__.py b/api/hub/views/private/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/hub/views/public/__init__.py b/api/hub/views/public/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/api/hub/views/public/exporter_detail.py b/api/hub/views/public/exporter_detail.py new file mode 100644 index 0000000..1db3818 --- /dev/null +++ b/api/hub/views/public/exporter_detail.py @@ -0,0 +1,33 @@ +import json + +from django.views import View +from django.http import JsonResponse + +from 
hub.models import Exporter + +class DetailView(View): + def get(self, request, exporter_id): + try: + exporter=Exporter.objects.select_related('category','official').prefetch_related('release_set').get(id=exporter_id) + data={ + 'exporter_id' : exporter.id, + 'name' : exporter.name, + 'logo_url' : exporter.logo_url, + 'category' : exporter.category.name, + 'official' : exporter.official.name, + 'stars' : exporter.stars, + 'repository_url' : exporter.repository_url, + 'description' : exporter.description, + 'readme' : exporter.readme.decode('utf-8'), + 'recent_release' : exporter.release_set.last().date if exporter.release_set.all() else '1970-01-01', + 'release' : [{ + 'release_version': release.version, + 'release_date' : release.date, + 'release_url' : release.release_url + } for release in exporter.release_set.all()], + } + + return JsonResponse(data, status=200) + + except Exporter.DoesNotExist: + return JsonResponse({'message':'NO_EXPORTER'}, status=400) \ No newline at end of file diff --git a/api/hub/views/public/exporters.py b/api/hub/views/public/exporters.py new file mode 100644 index 0000000..30fb96c --- /dev/null +++ b/api/hub/views/public/exporters.py @@ -0,0 +1,35 @@ +import json + +from django.views import View +from django.http import JsonResponse + +from hub.models import Exporter + +class MainView(View): + def get(self, request): + try: + exporters=Exporter.objects.select_related('category', 'official').prefetch_related('release_set').order_by('id') + data={"exporters": + [ + { + "exporter_id" : exporter.id, + "name" : exporter.name, + "logo_url" : exporter.logo_url, + "category" : exporter.category.name, + "official" : exporter.official.name, + "stars" : exporter.stars, + "repository_url" : exporter.repository_url, + "description" : exporter.description, + "recent_release" : exporter.release_set.last().date if exporter.release_set.all() else '1970-01-01', + "release" : [{ + "release_version": release.version, + "release_date" : release.date, + "release_url" : release.release_url + } for release in exporter.release_set.all()], + } + for exporter in exporters] + } + + return JsonResponse(data, status=200) + except Exception as e: + return JsonResponse({'message':f"{e}"}, status=400) \ No newline at end of file diff --git a/api/manage.py b/api/manage.py new file mode 100755 index 0000000..dc3f71a --- /dev/null +++ b/api/manage.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +"""Django's command-line utility for administrative tasks.""" +import os +import sys + + +def main(): + """Run administrative tasks.""" + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'exporterhub.settings') + try: + from django.core.management import execute_from_command_line + except ImportError as exc: + raise ImportError( + "Couldn't import Django. Are you sure it's installed and " + "available on your PYTHONPATH environment variable? Did you " + "forget to activate a virtual environment?" 
+        ) from exc
+    execute_from_command_line(sys.argv)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/api/my_settings.py b/api/my_settings.py
new file mode 100644
index 0000000..0fbb818
--- /dev/null
+++ b/api/my_settings.py
@@ -0,0 +1,16 @@
+import os
+
+SECRET_KEY = os.environ.get('SECRET_KEY', 'unsafe-secret-key')
+
+TOKEN = os.environ.get('APP_TOKEN', None)
+DATABASES = {
+    'default': {
+        'ENGINE': os.environ.get('APP_DB_ENGINE', 'django.db.backends.sqlite3'),
+        'NAME': os.environ.get('DB_NAME', 'db.sqlite'),
+        'USER': os.environ.get('DB_USER', ''),
+        'PASSWORD': os.environ.get('DB_PASSWORD', ''),
+        'HOST': os.environ.get('DB_HOST', None),
+        'PORT': os.environ.get('DB_PORT', None),
+        'CONN_MAX_AGE': 600,
+    }
+}
diff --git a/api/my_settings_old.py b/api/my_settings_old.py
new file mode 100644
index 0000000..6549202
--- /dev/null
+++ b/api/my_settings_old.py
@@ -0,0 +1,11 @@
+DATABASES = {
+    'default' : {
+        'ENGINE': 'django.db.backends.mysql',
+        'NAME': "exporterhub",
+        'USER': "root",
+        'PASSWORD': 'secret',
+        'HOST': "maria_db",
+        'PORT': "3306",
+    }
+}
+TOKEN=""
diff --git a/api/repo_list.txt b/api/repo_list.txt
new file mode 100644
index 0000000..ba8c55b
--- /dev/null
+++ b/api/repo_list.txt
@@ -0,0 +1,36 @@
+
+"https://github.com/cloudflare/ebpf_exporter",
+"https://github.com/digitalocean/ceph_exporter",
+"https://github.com/fstab/grok_exporter",
+"https://github.com/google/cadvisor",
+"https://github.com/kbudde/rabbitmq_exporter",
+"https://github.com/Kong/kong-plugin-prometheus",
+"https://github.com/kubernetes/kube-state-metrics",
+"https://github.com/ncabatoff/process-exporter",
+"https://github.com/nginxinc/nginx-prometheus-exporter",
+"https://github.com/NVIDIA/gpu-monitoring-tools",
+"https://github.com/oliver006/redis_exporter",
+"https://github.com/openstack-exporter/openstack-exporter",
+"https://github.com/prometheus-community/json_exporter",
+"https://github.com/prometheus-community/stackdriver_exporter",
+"https://github.com/prometheus-community/windows_exporter",
+"https://github.com/prometheus/blackbox_exporter",
+"https://github.com/prometheus/cloudwatch_exporter",
+"https://github.com/prometheus/collectd_exporter",
+"https://github.com/prometheus/consul_exporter",
+"https://github.com/prometheus/graphite_exporter",
+"https://github.com/prometheus/haproxy_exporter",
+"https://github.com/prometheus/influxdb_exporter",
+"https://github.com/prometheus/jmx_exporter",
+"https://github.com/prometheus/memcached_exporter",
+"https://github.com/prometheus/mysqld_exporter",
+"https://github.com/prometheus/node_exporter",
+"https://github.com/prometheus/snmp_exporter",
+"https://github.com/prometheus/statsd_exporter",
+"https://github.com/RobustPerception/azure_metrics_exporter",
+"https://github.com/wrouesnel/postgres_exporter",
+"https://github.com/prometheus/client_golang",
+"https://github.com/prometheus/client_java",
+"https://github.com/prometheus/client_python",
+"https://github.com/prometheus/client_ruby",
+"https://github.com/prometheus/pushgateway",
diff --git a/api/requirements.txt b/api/requirements.txt
new file mode 100644
index 0000000..7bebca8
--- /dev/null
+++ b/api/requirements.txt
@@ -0,0 +1,15 @@
+APScheduler==3.6.3
+asgiref==3.3.1
+certifi==2020.6.20
+chardet==3.0.4
+Django==3.1.3
+django-apscheduler==0.5.1
+django-cors-headers==3.5.0
+idna==2.10
+mysqlclient==2.0.1
+pytz==2020.4
+requests==2.25.0
+six==1.15.0
+sqlparse==0.4.1
+tzlocal==2.1
+urllib3==1.26.2
diff --git a/docker-compose.yml b/docker-compose.yml
index b9a8a4f..71b8594 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,7 +1,7 @@
 version: '3.1'
 services:
   expoterhub:
-    image: nexclipper/exporterhub:release-v0.2.1
+    image: nexclipper/exporterhub:0.2.3
     ports:
       - "8080:3000"
     depends_on:
@@ -21,8 +21,20 @@ services:
 #      - ./db_data:/var/lib/mysql/data
       - ./db_data:/var/lib/mysql/
 #      - ./exporterhub_schema.sql:/data/application/init.sql
+  scheduler:
+    image: nexclipper/exporterhub-api:0.1
+    environment:
+      APP_DB_ENGINE: django.db.backends.mysql
+      DB_NAME: exporterhub
+      DB_USER: root
+      DB_PASSWORD: secret
+      DB_HOST: maria_db
+      DB_PORT: "3306"
+      APP_STAT: "scheduler"
+      SECRET_KEY: "supersecretkey"
+      APP_TOKEN: "TOKEN must be HERE!!!"
   api:
-    image: nexclipper/exporterhub-be:0.1
+    image: nexclipper/exporterhub-api:0.1
     ports:
       - "8000:8000"
     environment:
@@ -34,5 +46,6 @@ services:
       DB_PORT: "3306"
       SECRET_KEY: "supersecretkey"
      APP_TOKEN: "TOKEN must be HERE!!!"
+      APP_STAT: "server"
     depends_on:
       - maria_db
diff --git a/src/Routes.js b/src/Routes.js
index 7f36ba8..f1d4bb4 100644
--- a/src/Routes.js
+++ b/src/Routes.js
@@ -16,12 +16,12 @@ function Routes() {
   useEffect(() => {
     const fetchData = async () => {
       const result = await axios(
-        "/data/exporter_list.json"
-        // EXPORTERS_API
+        // "/data/exporter_list.json"
+        EXPORTERS_API
       );
       const categoriesData = await axios(
-        "/data/categories.json"
-        //CATEGORIES_API
+        // "/data/categories.json"
+        CATEGORIES_API
       );
 
       dispatch(loadData(result.data.exporters));
diff --git a/src/components/Content/ContentMenu.js b/src/components/Content/ContentMenu.js
index f59f511..9edd3f5 100644
--- a/src/components/Content/ContentMenu.js
+++ b/src/components/Content/ContentMenu.js
@@ -3,6 +3,7 @@ import { useDispatch } from "react-redux";
 import axios from "axios";
 import styled from "styled-components";
 import { sortByPopularity } from "../../store/actions/exporterActions";
+import { CATEGORIES_API } from "../../config";
 
 const ContentMenu = ({ totalCount }) => {
   const [categories, setCategories] = useState([]);
@@ -14,8 +15,8 @@ const ContentMenu = ({ totalCount }) => {
   useEffect(() => {
     axios
       .get(
-        "/data/categories.json"
-        //CATEGORIES_API
+        // "/data/categories.json"
+        CATEGORIES_API
       )
       .then(res => {
         setCategories(res.data.categories);
diff --git a/src/components/ContentDetail/ContentDetail.js b/src/components/ContentDetail/ContentDetail.js
index cbc16c6..8e53b11 100644
--- a/src/components/ContentDetail/ContentDetail.js
+++ b/src/components/ContentDetail/ContentDetail.js
@@ -14,8 +14,8 @@ const ContentDetail = () => {
   useEffect(() => {
     axios
       .get(
-        `http://10.153.5.73:8000/exporters/${id}`
-        //`${EXPORTER_API}/${id}`
+        // `http://10.153.5.73:8000/exporters/${id}`
+        `${EXPORTER_API}/${id}`
       )
       .then(res => {
         setExporterInfo(res.data);
diff --git a/src/components/Modal/EditModal.js b/src/components/Modal/EditModal.js
index 9ddedbb..481cda7 100644
--- a/src/components/Modal/EditModal.js
+++ b/src/components/Modal/EditModal.js
@@ -2,6 +2,7 @@ import { useState } from "react";
 import { useSelector } from "react-redux";
 import axios from "axios";
 import styled, { css } from "styled-components";
+import { EXPORTER_ADMIN_API } from "../../config";
 
 const EditModal = ({ cancleModal, exporterId }) => {
   const categories = useSelector(store => store.categoryReducer);
@@ -10,23 +11,21 @@ const EditModal = ({ cancleModal, exporterId }) => {
   const deleteExporter = () => {
     axios
       .delete(
-        `http://10.153.5.73:8000/exporter?exporter_id=${exporterId}`
-        //`${EXPORTER_ADMIN_API}?exporter_id=${exporterId}`
+        // `http://10.153.5.73:8000/exporter?exporter_id=${exporterId}`
+        `${EXPORTER_ADMIN_API}?exporter_id=${exporterId}`
       )
       .then(res => {
         console.log(res.data.message);
-        //성공을 알리는 모달
       })
       .catch(error => {
        console.log(error.response.data.message);
-        //실패를 알리는 모달
       });
   };
   const editExporter = () => {
     axios
       .PATCH(
-        `http://10.153.5.73:8000/exporter?exporter_id=${exporterId}`,
-        //`${EXPORTER_ADMIN_API}?exporter_id=${exporterId}`,
+        // `http://10.153.5.73:8000/exporter?exporter_id=${exporterId}`,
+        `${EXPORTER_ADMIN_API}?exporter_id=${exporterId}`,
       { category: "category_name" }
       )
       .then(res => {
diff --git a/src/components/Modal/RegisterModal.js b/src/components/Modal/RegisterModal.js
index 6d60a4e..1df2687 100644
--- a/src/components/Modal/RegisterModal.js
+++ b/src/components/Modal/RegisterModal.js
@@ -2,6 +2,7 @@ import { useState } from "react";
 import { useSelector } from "react-redux";
 import axios from "axios";
 import styled from "styled-components";
+import { EXPORTER_ADMIN_API } from "../../config";
 
 const RegisterModal = ({ cancleModal }) => {
   const categories = useSelector(store => store.categoryReducer);
@@ -11,8 +12,8 @@ const RegisterModal = ({ cancleModal }) => {
   const registerExporter = () => {
     axios
       .post(
-        "http://10.153.1.241:8000/exporter",
-        //EXPORTER_ADMIN_API
+        // "http://10.153.1.241:8000/exporter",
+        EXPORTER_ADMIN_API,
       {
         repo_url: repoUrl,
         category: category
diff --git a/src/config.js b/src/config.js
index 3646f7d..059e146 100644
--- a/src/config.js
+++ b/src/config.js
@@ -1,4 +1,5 @@
-export const EXPORTERS_API = "http://10.153.4.34:8000";
-export const EXPORTER_API = "http://10.153.4.34:8000/exporters";
-export const CATEGORIES_API = "http://10.153.4.34:8000/categories";
-export const EXPORTER_ADMIN_API = "http://10.153.4.34:8000/exporter";
+export let API_SERVER = "api";
+export const EXPORTERS_API = `http://${API_SERVER}:8000`;
+export const EXPORTER_API = `http://${API_SERVER}:8000/exporters`;
+export const CATEGORIES_API = `http://${API_SERVER}:8000/categories`;
+export const EXPORTER_ADMIN_API = `http://${API_SERVER}:8000/exporter`;
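For reference, below is a minimal smoke-test sketch (not part of the patch) for the endpoints wired up above. It assumes the api container is published on localhost:8000 as in docker-compose.yml, that the Django URLconf (not shown in this diff) maps the same paths used in src/config.js (the exporter list at the root path, /exporters/<id>, and /exporter), and that the repository URL and "Hardware" category below are placeholder values that exist in the backend's categories mapping.

# Smoke-test sketch. Assumptions: localhost:8000 is the published api port,
# routes match src/config.js, and "Hardware" is a valid category name --
# adjust these placeholders to your deployment.
import requests

BASE = "http://localhost:8000"  # inside the compose network this would be http://api:8000

# Register an exporter (handled by the post() view above).
resp = requests.post(
    f"{BASE}/exporter",
    json={"repo_url": "https://github.com/prometheus/node_exporter", "category": "Hardware"},
)
print(resp.status_code, resp.json())  # expect 201 {'message': 'SUCCESS'}

# List exporters (MainView) and fetch one detail page (DetailView).
exporters = requests.get(BASE).json()["exporters"]  # EXPORTERS_API points at the root path
if exporters:
    exporter_id = exporters[0]["exporter_id"]
    detail = requests.get(f"{BASE}/exporters/{exporter_id}").json()
    print(detail["name"], detail["recent_release"])

    # Re-categorise, then remove it (patch()/delete() read exporter_id from the query string).
    requests.patch(f"{BASE}/exporter", params={"exporter_id": exporter_id}, json={"category": "Hardware"})
    requests.delete(f"{BASE}/exporter", params={"exporter_id": exporter_id})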