File indexing completed on 2024-05-05 15:53:11

0001 /* GCompris - lang_api.js
0002  *
0003  * SPDX-FileCopyrightText: 2014 Bruno Coudoin
0004  *
0005  * Authors:
0006  *   Bruno Coudoin (bruno.coudoin@gcompris.net)
0007  *
0008  *   SPDX-License-Identifier: GPL-3.0-or-later
0009  */
0010 
0011 .pragma library
0012 .import GCompris 1.0 as GCompris
0013 .import "qrc:/gcompris/src/core/core.js" as Core
0014 
// Module-scope holder for the translated content table.
// NOTE(review): appears unused within this file — load() stores the
// translation on dataset['contentText'] instead — verify against
// importers of this library before removing.
var contentText
0016 
// Validation callback passed to parser.parseFromUrl().
// Placeholder: accepts every dataset unconditionally.
function validateDataset(levels)
{
    return true;
}
0021 
/**
 * Load a word dataset and its translated content.
 *
 * @param parser object exposing parseFromUrl(url, validator) (QML JsonParser)
 * @param baseUrl base resource URL
 * @param datasetFilename filename of the word dataset (e.g. "words.json")
 * @param translationFilename filename of the locale content (e.g. "content-fr.json")
 * @returns the dataset with a 'contentText' translation table attached and
 *          image prefixes applied, or null on any failure.
 */
function load(parser, baseUrl, datasetFilename, translationFilename) {

    var datasetUrl = baseUrl + "/" + datasetFilename;
    var dataset = parser.parseFromUrl(datasetUrl, validateDataset);
    if (dataset === null) {
        console.error("Lang: Invalid dataset, can't continue: "
                      + datasetUrl);
        // Return null (was a bare `return`/undefined) so every failure
        // path of this function yields the same value.
        return null;
    }
    // Attach the locale-specific translation table to the dataset.
    dataset['contentText'] = loadContent(parser,
                                         GCompris.ApplicationInfo.getLocaleFilePath(baseUrl + "/" + translationFilename))

    if(!dataset['contentText']) {
        return null
    }
    // Prepend each chapter's imgPrefix to its word image paths.
    applyImgPrefix(dataset)

    return dataset
}
0041 
/**
 * Parse a translation content file.
 *
 * @param parser object exposing parseFromUrl(url, validator) (QML JsonParser)
 * @param datasetUrl full URL of the content file to parse
 * @returns the parsed content object, or null if parsing failed.
 */
function loadContent(parser, datasetUrl) {

    var dataset = parser.parseFromUrl(datasetUrl, validateDataset);
    if (dataset === null) {
        console.error("Lang: Invalid dataset, can't continue: "
                      + datasetUrl);
        // Return null (was a bare `return`/undefined) for a consistent
        // failure value; callers only test truthiness, so this is safe.
        return null;
    }
    return dataset
}
0052 
// Fetch a single chapter entry from the dataset by key/index.
// (The dataset parameter is the container itself; chapter is the lookup key.)
function getChapter(dataset, chapter) {
    var entry = dataset[chapter];
    return entry;
}
0056 
0057 // Return a datamodel for the chapter suitable for creating a chapter selector
0058 function getChapterModel(dataset) {
0059     var chapters = []
0060     for (var c = 0; c < dataset.length; c++) {
0061         chapters.push(
0062                     {'name': dataset[c].name,
0063                         'image': dataset[c].content[0].content[0].image,
0064                         'index': c
0065                     })
0066     }
0067     return chapters
0068 }
0069 
// Fetch one lesson from a chapter by index/key.
// (dataset is accepted for signature symmetry but not consulted.)
function getLesson(dataset, chapter, lesson) {
    var lessons = chapter.content;
    return lessons[lesson];
}
0073 
// Flatten the dataset into a single list containing every lesson of
// every chapter, in document order.
function getAllLessons(dataset) {
    var collected = [];
    for (var chapterKey in dataset) {
        var chapterContent = dataset[chapterKey].content;
        for (var lessonKey in chapterContent) {
            collected.push(chapterContent[lessonKey]);
        }
    }
    return collected;
}
0084 
0085 /* return a list of words in the lesson. Each words is formatted like:
0086  * 'description' => "splatter"
0087  * 'image' => "words/splatter.webp"
0088  * 'voice' => "voices-$CA/$LOCALE/words/splatter.$CA"
0089  * 'translatedTxt' => "splatter"
0090  */
/* Return the lesson's words enriched with their translation. Each word
 * looks like:
 * 'description' => "splatter"
 * 'image' => "words/splatter.webp"
 * 'voice' => "voices-$CA/$LOCALE/words/splatter.$CA"
 * 'translatedTxt' => "splatter"
 * Words without a translation in dataset.contentText are dropped.
 * Note: the word objects are mutated in place ('translatedTxt' is set
 * on them) before filtering.
 */
function getLessonWords(dataset, lesson) {
    var translated = [];
    var words = lesson.content;
    for (var idx in words) {
        var entry = words[idx];
        // The translation table is keyed by the voice file's basename
        // with the $CA placeholder resolved to "ogg".
        var baseName = entry.voice.substr(entry.voice.lastIndexOf("/") + 1);
        entry['translatedTxt'] = dataset.contentText[baseName.replace("$CA", "ogg")];
        if (entry['translatedTxt'])
            translated.push(entry);
    }
    return translated;
}
0104 
/* Prepend each chapter's imgPrefix to every word image path it
 * contains. Mutates the dataset in place.
 * NOTE(review): the loop BREAKS at the first chapter lacking an
 * imgPrefix, leaving all later chapters untouched (behavior kept
 * as-is; confirm whether `continue` was intended).
 */
function applyImgPrefix(dataset) {
    var chapterCount = dataset.length;
    for (var ci = 0; ci < chapterCount; ci++) {
        var chapter = dataset[ci];
        if (!chapter.imgPrefix)
            break;
        for (var li in chapter.content) {
            var lessonContent = chapter.content[li].content;
            for (var wi in lessonContent) {
                lessonContent[wi].image = chapter.imgPrefix + lessonContent[wi].image;
            }
        }
    }
}
0118 
0119 /**
0120  * Helper to load a dataset
0121  */
0122 function loadDataset(parser, resourceUrl, locale) {
0123     var wordset = GCompris.ApplicationSettings.useExternalWordset() ? "words.json" : "words_sample.json";
0124 
0125     var dataset = load(parser, resourceUrl, wordset,
0126                         "content-"+ locale +".json")
0127     var englishFallback = false
0128 
0129     // If dataset is empty, we try to load from short locale
0130     // and if not present again, we switch to default one
0131     var localeUnderscoreIndex = locale.indexOf('_')
0132     if(!dataset) {
0133         var localeShort;
0134         // We will first look again for locale xx (without _XX if exist)
0135         if(localeUnderscoreIndex > 0) {
0136             localeShort = locale.substring(0, localeUnderscoreIndex)
0137         } else {
0138             localeShort = locale;
0139         }
0140         dataset = load(parser, resourceUrl, wordset,
0141                             "content-"+localeShort+ ".json")
0142     }
0143 
0144     // If still dataset is empty then fallback to english
0145     if(!dataset) {
0146         // English fallback
0147         englishFallback = true
0148         dataset = load(parser, resourceUrl, wordset, "content-en.json")
0149     }
0150     return {"dataset": dataset, "englishFallback": englishFallback};
0151 }