From e0ac46bca5a19b2c557c9bc8533aadda86c9f617 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Juanjo=20Guti=C3=A9rrez?=
Date: Fri, 30 Jan 2026 10:17:24 +0100
Subject: [PATCH] first test

---
 .drone.yml |  26 ++++++++++++
 Dockerfile |  10 +++++
 README.md  | 114 +++++++++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 150 insertions(+)
 create mode 100644 .drone.yml
 create mode 100644 Dockerfile
 create mode 100644 README.md

diff --git a/.drone.yml b/.drone.yml
new file mode 100644
index 0000000..ca93fe3
--- /dev/null
+++ b/.drone.yml
@@ -0,0 +1,26 @@
+---
+kind: pipeline
+type: docker
+name: default
+
+steps:
+- name: docker image build
+  image: plugins/docker
+  settings:
+    tags:
+    - latest
+    repo: docker.gutierrezdequevedo.com/ps/hermes
+- name: notify matrix
+  image: spotlightkid/drone-matrixchat-notify
+  settings:
+    homeserver: 'https://grava.work'
+    roomid: '!wMVeFx6jwwF0TWA18h:grava.work'
+    userid: '@juanjo:grava.work'
+    deviceid: 'drone CI'
+    accesstoken: G66FRa3fG7qNfM4KKoW5wx6TWophvvtF
+    markdown: 'yes'
+    template: |
+      `${DRONE_REPO}` build #${DRONE_BUILD_NUMBER} status: **${DRONE_BUILD_STATUS}**
+
+      [${DRONE_BUILD_LINK}]
+
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..d7c2fb1
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,10 @@
+FROM python:3.14-slim
+
+WORKDIR /app
+
+COPY . .
+RUN pip install --no-cache-dir -r requirements.txt
+
+EXPOSE 5000
+
+CMD ["python", "app.py"]
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..3c5b0b2
--- /dev/null
+++ b/README.md
@@ -0,0 +1,114 @@
+# OpenAI Models Viewer
+
+A web-based interface for browsing and interacting with OpenAI-compatible API endpoints. This application allows you to manage multiple server connections, view available models, and chat with AI models directly from your browser.
+
+## Features
+
+- **Multi-Server Management**: Add and manage multiple OpenAI-compatible endpoints
+- **Model Discovery**: Browse and view all available models from configured servers
+- **Interactive Chat**: Chat directly with AI models through a clean web interface
+- **Local Storage**: Securely stores server configurations in browser localStorage
+- **Responsive Design**: Works on desktop and mobile devices
+
+## Project Structure
+
+```
+├── app.py             # Flask backend server
+├── requirements.txt   # Python dependencies
+├── static/
+│   ├── index.html     # Main HTML interface
+│   ├── script.js      # Client-side JavaScript logic
+│   └── style.css      # Styling and layout
+```
+
+## Getting Started
+
+### Prerequisites
+
+- Python 3.7+
+- pip (Python package manager)
+
+### Installation
+
+1. Clone the repository:
+```bash
+git clone
+cd openai-models-viewer
+```
+
+2. Install Python dependencies:
+```bash
+pip install -r requirements.txt
+```
+
+3. Run the application:
+```bash
+python app.py
+```
+
+The application will start on `http://localhost:5000` by default.
+
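+If you want to confirm the server is responding before opening the browser, you can probe it from another terminal. The exact routes depend on how `app.py` is wired, so treat this as a quick sanity check rather than a documented endpoint:
+
+```bash
+# Assumes the default port; any HTTP response means the Flask app is up
+curl -i http://localhost:5000/
+```
+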
+### Usage
+
+1. **Add a Server**:
+   - Click the gear icon next to the server selector
+   - Enter a server name (e.g., "OpenAI Production")
+   - Enter the API endpoint URL (e.g., `https://api.openai.com`)
+   - Enter your API key
+   - Click "Add Server"
+
+2. **View Models**:
+   - Select a configured server from the dropdown
+   - Models will automatically load and display
+
+3. **Chat with Models**:
+   - Click on any model to open the chat interface
+   - Type your message and press Enter or click Send
+   - View the AI's response in real-time
+
+## Development
+
+### Running with Custom Port
+
+```bash
+python app.py 8080
+```
+
+### Docker Support
+
+Build and run with Docker:
+
+```bash
+docker build -t openai-models-viewer .
+docker run -p 5000:5000 openai-models-viewer
+```
+
+## API Endpoints
+
+The application connects to standard OpenAI-compatible endpoints:
+- `/models` - List available models
+- `/v1/chat/completions` - Chat completion endpoint
+
+## Security
+
+- API keys are stored locally in browser localStorage
+- All communication happens directly between your browser and the API endpoints
+- No server-side storage of sensitive information
+
+## Contributing
+
+1. Fork the repository
+2. Create your feature branch
+3. Commit your changes
+4. Push to the branch
+5. Open a pull request
+
+## License
+
+This project is licensed under the MIT License - see the LICENSE file for details.
+
+## Acknowledgments
+
+- Built with Flask (Python web framework)
+- Uses modern JavaScript for client-side functionality
+- Designed with responsive CSS for cross-device compatibility
\ No newline at end of file
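
The README above points the app at standard OpenAI-compatible endpoints. As a reference sketch (not part of the patch itself), those endpoints can be exercised directly with curl; the base URL, API key variable, and model name below are placeholders:

```bash
# List available models (OpenAI-compatible servers typically expose /v1/models)
curl -s https://api.openai.com/v1/models \
  -H "Authorization: Bearer $OPENAI_API_KEY"

# Minimal chat completion request, roughly what the chat UI would send
curl -s https://api.openai.com/v1/chat/completions \
  -H "Authorization: Bearer $OPENAI_API_KEY" \
  -H "Content-Type: application/json" \
  -d '{"model": "gpt-4o-mini", "messages": [{"role": "user", "content": "Hello"}]}'
```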