19 commits
e8359fb - Initial plan (Copilot, Dec 22, 2025)
ef05438 - Add local AI models infrastructure with download scripts and custom A… (Copilot, Dec 22, 2025)
ce5d596 - Integrate local model and API key dialogs into AI chat interface (Copilot, Dec 22, 2025)
32de913 - Add local model inference stub and test files (Copilot, Dec 22, 2025)
d00b480 - Fix code review issues: import paths, compatibility, and deduplication (Copilot, Dec 22, 2025)
24d3910 - Add comprehensive feature summary documentation (Copilot, Dec 22, 2025)
4cc0351 - Implement direct API client for custom API keys - bypass GDevelop quotas (Copilot, Dec 22, 2025)
1e729f6 - Integrate custom API keys into AI request pipeline (Copilot, Dec 22, 2025)
ed70c9b - Implement production-ready local inference with transformers.js and W… (Copilot, Dec 22, 2025)
efb9aae - Add Scratch project import support based on Leopard.js concepts (Copilot, Dec 22, 2025)
0eaccb5 - Add VS Code Copilot Chat integration with agent system and context-aw… (Copilot, Dec 22, 2025)
bb2a26e - Merge pull request #1 from charlieduzstuf/copilot/setup-local-models-… (charlieduzstuf, Dec 22, 2025)
3d94b9e - Merge branch '4ian:master' into master (charlieduzstuf, Dec 24, 2025)
bfd8c6d - Initial plan (Copilot, Dec 24, 2025)
8be5b77 - Fix Flow type checking errors - Phase 1 (Copilot, Dec 24, 2025)
2aa96db - Fix remaining Flow type errors - Phase 2 (Copilot, Dec 24, 2025)
7ff4661 - Format code with Prettier (Copilot, Dec 24, 2025)
6c15bae - Make GitHub Actions workflows work in forks without secrets (Copilot, Dec 24, 2025)
52d125d - Merge pull request #2 from charlieduzstuf/copilot/fix-ci-job-errors (charlieduzstuf, Dec 24, 2025)
4 changes: 4 additions & 0 deletions .github/workflows/build-storybook.yml
@@ -32,6 +32,7 @@ jobs:
          cache-dependency-path: "newIDE/app/package-lock.json"

      - name: Configure AWS Credentials
        if: ${{ secrets.BUILD_STORYBOOK_AWS_ACCESS_KEY_ID != '' }}
        uses: aws-actions/configure-aws-credentials@v2
        with:
          aws-access-key-id: ${{ secrets.BUILD_STORYBOOK_AWS_ACCESS_KEY_ID }}
@@ -48,14 +49,17 @@ jobs:

      # Publish on S3 to allow quick testing of components.
      - name: Publish Storybook to S3 bucket (specific commit)
        if: ${{ secrets.BUILD_STORYBOOK_AWS_ACCESS_KEY_ID != '' }}
        run: aws s3 sync ./build-storybook/ s3://gdevelop-storybook/$(git rev-parse --abbrev-ref HEAD)/commit/$(git rev-parse HEAD)/ --delete
        working-directory: newIDE/app

      - name: Publish Storybook to S3 bucket (latest)
        if: ${{ secrets.BUILD_STORYBOOK_AWS_ACCESS_KEY_ID != '' }}
        run: aws s3 sync ./build-storybook/ s3://gdevelop-storybook/$(git rev-parse --abbrev-ref HEAD)/latest/ --delete
        working-directory: newIDE/app

      - name: Log urls to the Storybook
        if: ${{ secrets.BUILD_STORYBOOK_AWS_ACCESS_KEY_ID != '' }}
        run: |
          echo "Find the latest Storybook for this branch on https://gdevelop-storybook.s3.amazonaws.com/$(git rev-parse --abbrev-ref HEAD)/latest/index.html"
          echo "Find the Storybook for this commit on https://gdevelop-storybook.s3.amazonaws.com/$(git rev-parse --abbrev-ref HEAD)/commit/$(git rev-parse HEAD)/index.html"
60 changes: 60 additions & 0 deletions .github/workflows/download-ai-models.yml
@@ -0,0 +1,60 @@
# GitHub Action to download local AI models for GDevelop
# This is a manual workflow due to the large size of the models (~134GB total)

name: Download Local AI Models

on:
  # Only allow manual triggering to avoid automatic downloads
  workflow_dispatch:
    inputs:
      model:
        description: 'Which model to download (0=Apriel, 1=GPT-OSS, 2=Qwen, all=All models)'
        required: false
        default: 'all'
        type: choice
        options:
          - all
          - '0'
          - '1'
          - '2'

jobs:
  download-models:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'
          cache: 'pip'

      - name: Install Python dependencies
        run: |
          pip install huggingface_hub

      - name: Download AI Models
        run: |
          cd newIDE/app/src/AiGeneration/Local
          if [ "${{ github.event.inputs.model }}" = "all" ]; then
            python3 download_models.py
          else
            python3 download_models.py ${{ github.event.inputs.model }}
          fi

      - name: Create model archive
        run: |
          cd newIDE/app/src/AiGeneration/Local
          tar -czf ai-models.tar.gz apriel-1.5-15b-thinker gpt-oss-20b qwen3-vl-32b-instruct

      - name: Upload models as artifact
        uses: actions/upload-artifact@v3
        with:
          name: gdevelop-ai-models
          path: newIDE/app/src/AiGeneration/Local/ai-models.tar.gz
          retention-days: 7

      - name: Log completion
        run: |
          echo "AI models have been downloaded and archived."
          echo "Download the artifact from the Actions tab to use these models locally."
4 changes: 2 additions & 2 deletions .github/workflows/extract-translations.yml
@@ -35,12 +35,12 @@ jobs:

      # Only upload on Crowdin for the master branch
      - name: Install Crowdin CLI
-       if: github.ref == 'refs/heads/master'
+       if: github.ref == 'refs/heads/master' && secrets.CROWDIN_PROJECT_ID != ''
        run: npm i -g @crowdin/cli

      - name: Upload translations to Crowdin
        run: crowdin upload sources
-       if: github.ref == 'refs/heads/master'
+       if: github.ref == 'refs/heads/master' && secrets.CROWDIN_PROJECT_ID != ''
        env:
          CROWDIN_PROJECT_ID: ${{ secrets.CROWDIN_PROJECT_ID }}
          CROWDIN_PERSONAL_TOKEN: ${{ secrets.CROWDIN_PERSONAL_TOKEN }}
1 change: 1 addition & 0 deletions newIDE/app/.flowconfig
@@ -27,3 +27,4 @@
[options]
module.ignore_non_literal_requires=true
sharedmemory.hash_table_pow=22
esproposal.optional_chaining=enable
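The added esproposal.optional_chaining option lets Flow type-check the optional-chaining (`?.`) operator, which the new spec file further down relies on. A minimal sketch (not part of the PR) of the syntax this option allows Flow to accept:

// @flow
// Minimal sketch (not from the PR): with esproposal.optional_chaining enabled,
// Flow type-checks `?.` on possibly-undefined values.
type Preset = {| id: string, disabled: boolean |};

const findPreset = (presets: Array<Preset>, id: string) =>
  presets.find(preset => preset.id === id);

const preset = findPreset([{ id: 'default', disabled: false }], 'default');
// Evaluates to undefined instead of throwing when no preset was found.
console.log(preset?.disabled);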
38 changes: 37 additions & 1 deletion newIDE/app/src/AiGeneration/AiConfiguration.js
@@ -4,11 +4,14 @@ import {
  type AiConfigurationPreset,
  type AiSettings,
} from '../Utils/GDevelopServices/Generation';
import { AVAILABLE_LOCAL_MODELS } from './Local/LocalModelManager';
import { shouldUseLocalModel, getActiveLocalModel } from './Local/LocalStorage';

export type AiConfigurationPresetWithAvailability = {|
  ...AiConfigurationPreset,
  disabled: boolean,
  enableWith: 'higher-tier-plan' | null,
  isLocalModel?: boolean,
|};

export const getAiConfigurationPresetsWithAvailability = ({
@@ -31,7 +34,7 @@
    }));
  }

- return aiSettings.aiRequest.presets.map(preset => {
+ const onlinePresets = aiSettings.aiRequest.presets.map(preset => {
    const presetAvailability = limits.capabilities.ai.availablePresets.find(
      presetAvailability =>
        presetAvailability.id === preset.id &&
@@ -45,8 +48,26 @@
        ? presetAvailability.disabled
        : preset.disabled,
      enableWith: (presetAvailability && presetAvailability.enableWith) || null,
      isLocalModel: false,
    };
  });

  // Add local model presets
  const localModelPresets: Array<AiConfigurationPresetWithAvailability> = AVAILABLE_LOCAL_MODELS.map(
    model => ({
      mode: 'chat',
      id: `local-${model.id}`,
      nameByLocale: {
        en: `${model.name} (Local)`,
      },
      disabled: false,
      isDefault: false,
      enableWith: null,
      isLocalModel: true,
    })
  );

  return [...onlinePresets, ...localModelPresets];
};

export const getDefaultAiConfigurationPresetId = (
@@ -62,3 +83,18 @@
    'default'
  );
};

/**
 * Check if a preset is a local model
 */
export const isLocalModelPreset = (presetId: string): boolean => {
  return presetId.startsWith('local-');
};

/**
 * Check if the current configuration uses unlimited requests
 * (local models or custom API keys)
 */
export const hasUnlimitedRequests = (presetId: string): boolean => {
  return isLocalModelPreset(presetId);
};
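As a usage illustration, here is a minimal sketch (not part of this PR) of how a caller could route a chat request with these helpers; `runLocalInference` and `sendOnlineAiRequest` are hypothetical placeholders standing in for the local and online request paths.

// @flow
// Minimal sketch (not part of the PR): route a request depending on whether
// the selected preset is a local model. The two send functions below are
// hypothetical placeholders, not actual GDevelop APIs.
import { isLocalModelPreset, hasUnlimitedRequests } from './AiConfiguration';

const runLocalInference = async (modelId: string, prompt: string): Promise<string> =>
  `local(${modelId}): ${prompt}`;

const sendOnlineAiRequest = async (
  presetId: string,
  prompt: string,
  countsAgainstQuota: boolean
): Promise<string> => `online(${presetId}, quota=${String(countsAgainstQuota)}): ${prompt}`;

export const sendChatPrompt = async (
  presetId: string,
  prompt: string
): Promise<string> => {
  if (isLocalModelPreset(presetId)) {
    // Local preset ids are prefixed with 'local-'; strip it to get the model id.
    return runLocalInference(presetId.replace('local-', ''), prompt);
  }
  // Only presets without unlimited requests should count against GDevelop quotas.
  const countsAgainstQuota = !hasUnlimitedRequests(presetId);
  return sendOnlineAiRequest(presetId, prompt, countsAgainstQuota);
};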
96 changes: 96 additions & 0 deletions newIDE/app/src/AiGeneration/AiConfiguration.spec.js
@@ -0,0 +1,96 @@
// @flow
/**
 * Tests for Local AI Model Configuration
 */

import {
  isLocalModelPreset,
  hasUnlimitedRequests,
  getAiConfigurationPresetsWithAvailability,
} from './AiConfiguration';

describe('AiConfiguration - Local Models', () => {
  describe('isLocalModelPreset', () => {
    it('should return true for local model preset IDs', () => {
      expect(isLocalModelPreset('local-apriel-1.5-15b-thinker')).toBe(true);
      expect(isLocalModelPreset('local-gpt-oss-20b')).toBe(true);
      expect(isLocalModelPreset('local-qwen3-vl-32b-instruct')).toBe(true);
    });

    it('should return false for non-local preset IDs', () => {
      expect(isLocalModelPreset('default')).toBe(false);
      expect(isLocalModelPreset('gpt-4')).toBe(false);
      expect(isLocalModelPreset('claude-3')).toBe(false);
    });
  });

  describe('hasUnlimitedRequests', () => {
    it('should return true for local model presets', () => {
      expect(hasUnlimitedRequests('local-apriel-1.5-15b-thinker')).toBe(true);
      expect(hasUnlimitedRequests('local-gpt-oss-20b')).toBe(true);
    });

    it('should return false for non-local presets', () => {
      expect(hasUnlimitedRequests('default')).toBe(false);
      expect(hasUnlimitedRequests('gpt-4')).toBe(false);
    });
  });

  describe('getAiConfigurationPresetsWithAvailability', () => {
    it('should include local model presets', () => {
      const mockGetAiSettings = () => ({
        aiRequest: {
          presets: [
            {
              mode: 'chat',
              id: 'default',
              nameByLocale: { en: 'Default' },
              disabled: false,
              isDefault: true,
            },
          ],
        },
      });

      const mockLimits: any = {
        capabilities: {
          ai: {
            availablePresets: [
              {
                mode: 'chat',
                name: 'Default',
                id: 'default',
                disabled: false,
              },
            ],
          },
          versionHistory: {},
        },
        quotas: {},
        credits: {
          userBalance: { amount: 0 },
          prices: {},
          purchasableQuantities: {},
        },
        message: undefined,
      };

      const presets = getAiConfigurationPresetsWithAvailability({
        getAiSettings: mockGetAiSettings,
        limits: mockLimits,
      });

      // Should have online presets + 3 local model presets
      expect(presets.length).toBeGreaterThanOrEqual(4);

      const localPresets = presets.filter(p => p.isLocalModel);
      expect(localPresets.length).toBe(3);

      // Check that local presets have correct properties
      const aprielPreset = localPresets.find(p => p.id.includes('apriel'));
      expect(aprielPreset).toBeDefined();
      expect(aprielPreset?.disabled).toBe(false);
      expect(aprielPreset?.isLocalModel).toBe(true);
    });
  });
});