diff --git a/.create-adapter.json b/.create-adapter.json
new file mode 100644
index 0000000..dcf061b
--- /dev/null
+++ b/.create-adapter.json
@@ -0,0 +1,39 @@
+{
+    "cli": true,
+    "target": "directory",
+    "adapterName": "pushover",
+    "title": "Pushover",
+    "description": "Sends pushover notifications",
+    "expert": "yes",
+    "features": [
+        "adapter"
+    ],
+    "adminFeatures": [],
+    "type": "protocols",
+    "startMode": "daemon",
+    "connectionType": "cloud",
+    "dataSource": "poll",
+    "connectionIndicator": "yes",
+    "language": "JavaScript",
+    "adminReact": "no",
+    "tools": [
+        "ESLint",
+        "type checking"
+    ],
+    "i18n": "JSON",
+    "releaseScript": "yes",
+    "devServer": "yes",
+    "devServerPort": 8081,
+    "indentation": "Space (4)",
+    "quotes": "single",
+    "es6class": "yes",
+    "authorName": "bluefox",
+    "authorGithub": "GermanBluefox",
+    "authorEmail": "dogafox@gmail.com",
+    "gitRemoteProtocol": "SSH",
+    "gitCommit": "yes",
+    "defaultBranch": "master",
+    "license": "MIT License",
+    "dependabot": "yes",
+    "creatorVersion": "2.1.1"
+}
\ No newline at end of file
diff --git a/.github/workflows/dependabot-automerge.yml b/.github/workflows/dependabot-automerge.yml
index 7ebd310..c9b5329 100644
--- a/.github/workflows/dependabot-automerge.yml
+++ b/.github/workflows/dependabot-automerge.yml
@@ -4,19 +4,24 @@ name: Auto-Merge Dependabot PRs
 
 on:
+  # WARNING: This needs to be run in the PR base, DO NOT build untrusted code in this action
+  # details under https://github.blog/changelog/2021-02-19-github-actions-workflows-triggered-by-dependabot-prs-will-run-with-read-only-permissions/
   pull_request_target:
 
 jobs:
   auto-merge:
+    if: github.actor == 'dependabot[bot]'
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v2
 
       - name: Check if PR should be auto-merged
        uses: ahmadnassri/action-dependabot-auto-merge@v2
        with:
-          # This must be a personal access token with push access
+          # In order to use this, you need to go to https://github.com/settings/tokens and
+          # create a Personal Access Token with the permission "public_repo".
+          # Enter this token in your repository settings under "Secrets" and name it AUTO_MERGE_TOKEN
          github-token: ${{ secrets.AUTO_MERGE_TOKEN }}
 
          # By default, squash and merge, so Github chooses nice commit messages
-          command: squash and merge
\ No newline at end of file
+          command: squash and merge
diff --git a/.npmignore b/.npmignore
deleted file mode 100644
index a4979d5..0000000
--- a/.npmignore
+++ /dev/null
@@ -1,12 +0,0 @@
-/**/*
-/gulpfile.js
-gulpfile.js
-!/admin/**/*
-!/admin/*
-!/img/*
-!/img/**/*
-!/io-package.json
-!/package.json
-!/LICENSE
-!/main.js
-!/README.md
diff --git a/.releaseconfig.json b/.releaseconfig.json
new file mode 100644
index 0000000..27072bb
--- /dev/null
+++ b/.releaseconfig.json
@@ -0,0 +1,3 @@
+{
+    "plugins": ["iobroker", "license"]
+}
\ No newline at end of file
diff --git a/admin/tsconfig.json b/admin/tsconfig.json
new file mode 100644
index 0000000..31f5ea0
--- /dev/null
+++ b/admin/tsconfig.json
@@ -0,0 +1,9 @@
+{
+    "extends": "../tsconfig.json",
+    "include": [
+        "./admin.d.ts",
+        "./**/*.js",
+        // include the adapter-config definition if it exists
+        "../src/lib/adapter-config.d.ts",
+    ]
+}
diff --git a/gulpfile.js b/gulpfile.js
deleted file mode 100644
index 9ffd4ec..0000000
--- a/gulpfile.js
+++ /dev/null
@@ -1,500 +0,0 @@
-/*!
- * ioBroker gulpfile
- * Date: 2019-01-28
- */
-'use strict';
-
-const gulp = require('gulp');
-const fs = require('fs');
-const pkg = require('./package.json');
-const ioPackage = require('./io-package.json');
-const version = (pkg && pkg.version) ? pkg.version : ioPackage.common.version;
-const fileName = 'words.js';
-const EMPTY = '';
-const translate = require('./lib/tools.js').translateText;
-const languages = {
-    en: {},
-    de: {},
-    ru: {},
-    pt: {},
-    nl: {},
-    fr: {},
-    it: {},
-    es: {},
-    pl: {},
-    'zh-cn': {}
-};
-
-function lang2data(lang, isFlat) {
-    let str = isFlat ? '' : '{\n';
-    let count = 0;
-    for (const w in lang) {
-        if (lang.hasOwnProperty(w)) {
-            count++;
-            if (isFlat) {
-                str += (lang[w] === '' ? (isFlat[w] || w) : lang[w]) + '\n';
-            } else {
-                const key = ' "' + w.replace(/'/g, '\\"') + '": ';
-                str += key + '"' + lang[w].replace(/"/g, '\\"') + '",\n';
-            }
-        }
-    }
-    if (!count)
-        return isFlat ? '' : '{\n}';
-    if (isFlat) {
-        return str;
-    } else {
-        return str.substring(0, str.length - 2) + '\n}';
-    }
-}
-
-function readWordJs(src) {
-    try {
-        let words;
-        if (fs.existsSync(src + 'js/' + fileName)) {
-            words = fs.readFileSync(src + 'js/' + fileName).toString();
-        } else {
-            words = fs.readFileSync(src + fileName).toString();
-        }
-        words = words.substring(words.indexOf('{'), words.length);
-        words = words.substring(0, words.lastIndexOf(';'));
-
-        const resultFunc = new Function('return ' + words + ';');
-
-        return resultFunc();
-    } catch (e) {
-        return null;
-    }
-}
-
-function padRight(text, totalLength) {
-    return text + (text.length < totalLength ? new Array(totalLength - text.length).join(' ') : '');
-}
-
-function writeWordJs(data, src) {
-    let text = '';
-    text += '/*global systemDictionary:true */\n';
-    text += '\'use strict\';\n\n';
-    text += 'systemDictionary = {\n';
-    for (const word in data) {
-        if (data.hasOwnProperty(word)) {
-            text += ' ' + padRight('"' + word.replace(/"/g, '\\"') + '": {', 50);
-            let line = '';
-            for (const lang in data[word]) {
-                if (data[word].hasOwnProperty(lang)) {
-                    line += '"' + lang + '": "' + padRight(data[word][lang].replace(/"/g, '\\"') + '",', 50) + ' ';
-                }
-            }
-            if (line) {
-                line = line.trim();
-                line = line.substring(0, line.length - 1);
-            }
-            text += line + '},\n';
-        }
-    }
-    text += '};';
-    if (fs.existsSync(src + 'js/' + fileName)) {
-        fs.writeFileSync(src + 'js/' + fileName, text);
-    } else {
-        fs.writeFileSync(src + '' + fileName, text);
-    }
-}
-
-function words2languages(src) {
-    const langs = Object.assign({}, languages);
-    const data = readWordJs(src);
-    if (data) {
-        for (const word in data) {
-            if (data.hasOwnProperty(word)) {
-                for (const lang in data[word]) {
-                    if (data[word].hasOwnProperty(lang)) {
-                        langs[lang][word] = data[word][lang];
-                        // pre-fill all other languages
-                        for (const j in langs) {
-                            if (langs.hasOwnProperty(j)) {
-                                langs[j][word] = langs[j][word] || EMPTY;
-                            }
-                        }
-                    }
-                }
-            }
-        }
-        if (!fs.existsSync(src + 'i18n/')) {
-            fs.mkdirSync(src + 'i18n/');
-        }
-        for (const l in langs) {
-            if (!langs.hasOwnProperty(l))
-                continue;
-            const keys = Object.keys(langs[l]);
-            keys.sort();
-            const obj = {};
-            for (let k = 0; k < keys.length; k++) {
-                obj[keys[k]] = langs[l][keys[k]];
-            }
-            if (!fs.existsSync(src + 'i18n/' + l)) {
-                fs.mkdirSync(src + 'i18n/' + l);
-            }
-
-            fs.writeFileSync(src + 'i18n/' + l + '/translations.json', lang2data(obj));
-        }
-    } else {
-        console.error('Cannot read or parse ' + fileName);
-    }
-}
-
-function words2languagesFlat(src) {
-    const langs = Object.assign({}, languages);
-    const data = readWordJs(src);
-    if (data) {
-        for (const word in data) {
-            if (data.hasOwnProperty(word)) {
-                for (const lang in data[word]) {
-                    if (data[word].hasOwnProperty(lang)) {
-                        langs[lang][word] = data[word][lang];
-                        // pre-fill all other languages
-                        for (const j in langs) {
-                            if (langs.hasOwnProperty(j)) {
-                                langs[j][word] = langs[j][word] || EMPTY;
-                            }
-                        }
-                    }
-                }
-            }
-        }
-        const keys = Object.keys(langs.en);
-        keys.sort();
-        for (const l in langs) {
-            if (!langs.hasOwnProperty(l))
-                continue;
-            const obj = {};
-            for (let k = 0; k < keys.length; k++) {
-                obj[keys[k]] = langs[l][keys[k]];
-            }
-            langs[l] = obj;
-        }
-        if (!fs.existsSync(src + 'i18n/')) {
-            fs.mkdirSync(src + 'i18n/');
-        }
-        for (const ll in langs) {
-            if (!langs.hasOwnProperty(ll))
-                continue;
-            if (!fs.existsSync(src + 'i18n/' + ll)) {
-                fs.mkdirSync(src + 'i18n/' + ll);
-            }
-
-            fs.writeFileSync(src + 'i18n/' + ll + '/flat.txt', lang2data(langs[ll], langs.en));
-        }
-        fs.writeFileSync(src + 'i18n/flat.txt', keys.join('\n'));
-    } else {
-        console.error('Cannot read or parse ' + fileName);
-    }
-}
-
-function languagesFlat2words(src) {
-    const dirs = fs.readdirSync(src + 'i18n/');
-    const langs = {};
-    const bigOne = {};
-    const order = Object.keys(languages);
-    dirs.sort(function (a, b) {
-        const posA = order.indexOf(a);
-        const posB = order.indexOf(b);
-        if (posA === -1 && posB === -1) {
-            if (a > b)
-                return 1;
-            if (a < b)
-                return -1;
-            return 0;
-        } else if (posA === -1) {
-            return -1;
-        } else if (posB === -1) {
-            return 1;
-        } else {
-            if (posA > posB)
-                return 1;
-            if (posA < posB)
-                return -1;
-            return 0;
-        }
-    });
-    const keys = fs.readFileSync(src + 'i18n/flat.txt').toString().split('\n');
-
-    for (let l = 0; l < dirs.length; l++) {
-        if (dirs[l] === 'flat.txt')
-            continue;
-        const lang = dirs[l];
-        const values = fs.readFileSync(src + 'i18n/' + lang + '/flat.txt').toString().split('\n');
-        langs[lang] = {};
-        keys.forEach(function (word, i) {
-            langs[lang][word] = values[i];
-        });
-
-        const words = langs[lang];
-        for (const word in words) {
-            if (words.hasOwnProperty(word)) {
-                bigOne[word] = bigOne[word] || {};
-                if (words[word] !== EMPTY) {
-                    bigOne[word][lang] = words[word];
-                }
-            }
-        }
-    }
-    // read actual words.js
-    const aWords = readWordJs();
-
-    const temporaryIgnore = ['flat.txt'];
-    if (aWords) {
-        // Merge words together
-        for (const w in aWords) {
-            if (aWords.hasOwnProperty(w)) {
-                if (!bigOne[w]) {
-                    console.warn('Take from actual words.js: ' + w);
-                    bigOne[w] = aWords[w];
-                }
-                dirs.forEach(function (lang) {
-                    if (temporaryIgnore.indexOf(lang) !== -1)
-                        return;
-                    if (!bigOne[w][lang]) {
-                        console.warn('Missing "' + lang + '": ' + w);
-                    }
-                });
-            }
-        }
-
-    }
-
-    writeWordJs(bigOne, src);
-}
-
-function languages2words(src) {
-    const dirs = fs.readdirSync(src + 'i18n/');
-    const langs = {};
-    const bigOne = {};
-    const order = Object.keys(languages);
-    dirs.sort(function (a, b) {
-        const posA = order.indexOf(a);
-        const posB = order.indexOf(b);
-        if (posA === -1 && posB === -1) {
-            if (a > b)
-                return 1;
-            if (a < b)
-                return -1;
-            return 0;
-        } else if (posA === -1) {
-            return -1;
-        } else if (posB === -1) {
-            return 1;
-        } else {
-            if (posA > posB)
-                return 1;
-            if (posA < posB)
-                return -1;
-            return 0;
-        }
-    });
-    for (let l = 0; l < dirs.length; l++) {
-        if (dirs[l] === 'flat.txt')
-            continue;
-        const lang = dirs[l];
-        langs[lang] = fs.readFileSync(src + 'i18n/' + lang + '/translations.json').toString();
-        langs[lang] = JSON.parse(langs[lang]);
-        const words = langs[lang];
-        for (const word in words) {
-            if (words.hasOwnProperty(word)) {
-                bigOne[word] = bigOne[word] || {};
-                if (words[word] !== EMPTY) {
-                    bigOne[word][lang] = words[word];
-                }
-            }
-        }
-    }
-    // read actual words.js
-    const aWords = readWordJs();
-
-    const temporaryIgnore = ['flat.txt'];
-    if (aWords) {
-        // Merge words together
-        for (const w in aWords) {
-            if (aWords.hasOwnProperty(w)) {
-                if (!bigOne[w]) {
-                    console.warn('Take from actual words.js: ' + w);
-                    bigOne[w] = aWords[w];
-                }
-                dirs.forEach(function (lang) {
-                    if (temporaryIgnore.indexOf(lang) !== -1)
-                        return;
-                    if (!bigOne[w][lang]) {
-                        console.warn('Missing "' + lang + '": ' + w);
-                    }
-                });
-            }
-        }
-
-    }
-
-    writeWordJs(bigOne, src);
-}
-
-async function translateNotExisting(obj, baseText, yandex) {
-    let t = obj['en'];
-    if (!t) {
-        t = baseText;
-    }
-
-    if (t) {
-        for (let l in languages) {
-            if (!obj[l]) {
-                const time = new Date().getTime();
-                obj[l] = await translate(t, l, yandex);
-                console.log('en -> ' + l + ' ' + (new Date().getTime() - time) + ' ms');
-            }
-        }
-    }
-}
-
-//TASKS
-
-gulp.task('adminWords2languages', function (done) {
-    words2languages('./admin/');
-    done();
-});
-
-gulp.task('adminWords2languagesFlat', function (done) {
-    words2languagesFlat('./admin/');
-    done();
-});
-
-gulp.task('adminLanguagesFlat2words', function (done) {
-    languagesFlat2words('./admin/');
-    done();
-});
-
-gulp.task('adminLanguages2words', function (done) {
-    languages2words('./admin/');
-    done();
-});
-
-gulp.task('updatePackages', function (done) {
-    ioPackage.common.version = pkg.version;
-    ioPackage.common.news = ioPackage.common.news || {};
-    if (!ioPackage.common.news[pkg.version]) {
-        const news = ioPackage.common.news;
-        const newNews = {};
-
-        newNews[pkg.version] = {
-            en: 'news',
-            de: 'neues',
-            ru: 'новое',
-            pt: 'novidades',
-            nl: 'nieuws',
-            fr: 'nouvelles',
-            it: 'notizie',
-            es: 'noticias',
-            pl: 'nowości',
-            'zh-cn': '新'
-        };
-        ioPackage.common.news = Object.assign(newNews, news);
-    }
-    fs.writeFileSync('io-package.json', JSON.stringify(ioPackage, null, 4));
-    done();
-});
-
-gulp.task('updateReadme', function (done) {
-    const readme = fs.readFileSync('README.md').toString();
-    const pos = readme.indexOf('## Changelog\n');
-    if (pos !== -1) {
-        const readmeStart = readme.substring(0, pos + '## Changelog\n'.length);
-        const readmeEnd = readme.substring(pos + '## Changelog\n'.length);
-
-        if (readme.indexOf(version) === -1) {
-            const timestamp = new Date();
-            const date = timestamp.getFullYear() + '-' +
-                ('0' + (timestamp.getMonth() + 1).toString(10)).slice(-2) + '-' +
-                ('0' + (timestamp.getDate()).toString(10)).slice(-2);
-
-            let news = '';
-            if (ioPackage.common.news && ioPackage.common.news[pkg.version]) {
-                news += '* ' + ioPackage.common.news[pkg.version].en;
-            }
-
-            fs.writeFileSync('README.md', readmeStart + '### ' + version + ' (' + date + ')\n' + (news ? news + '\n\n' : '\n') + readmeEnd);
-        }
-    }
-    done();
-});
-
-gulp.task('translate', async function (done) {
-    let yandex;
-    const i = process.argv.indexOf('--yandex');
-    if (i > -1) {
-        yandex = process.argv[i + 1];
-    }
-
-    if (ioPackage && ioPackage.common) {
-        if (ioPackage.common.news) {
-            console.log('Translate News');
-            for (let k in ioPackage.common.news) {
-                console.log('News: ' + k);
-                let nw = ioPackage.common.news[k];
-                await translateNotExisting(nw, null, yandex);
-            }
-        }
-        if (ioPackage.common.titleLang) {
-            console.log('Translate Title');
-            await translateNotExisting(ioPackage.common.titleLang, ioPackage.common.title, yandex);
-        }
-        if (ioPackage.common.desc) {
-            console.log('Translate Description');
-            await translateNotExisting(ioPackage.common.desc, null, yandex);
-        }
-
-        if (fs.existsSync('./admin/i18n/en/translations.json')) {
-            let enTranslations = require('./admin/i18n/en/translations.json');
-            for (let l in languages) {
-                console.log('Translate Text: ' + l);
-                let existing = {};
-                if (fs.existsSync('./admin/i18n/' + l + '/translations.json')) {
-                    existing = require('./admin/i18n/' + l + '/translations.json');
-                }
-                for (let t in enTranslations) {
-                    if (!existing[t]) {
-                        existing[t] = await translate(enTranslations[t], l, yandex);
-                    }
-                }
-                if (!fs.existsSync('./admin/i18n/' + l + '/')) {
-                    fs.mkdirSync('./admin/i18n/' + l + '/');
-                }
-                fs.writeFileSync('./admin/i18n/' + l + '/translations.json', JSON.stringify(existing, null, 4));
-            }
-        }
-
-    }
-    fs.writeFileSync('io-package.json', JSON.stringify(ioPackage, null, 4));
-});
-
-gulp.task('translateAndUpdateWordsJS', gulp.series('translate', 'adminLanguages2words', 'adminWords2languages'));
-
-gulp.task('copy', done => {
-    const words = fs.readFileSync(__dirname + '/admin/words.js').toString('utf8');
-    const translation = words.substring(words.indexOf('{'), words.lastIndexOf(';'));
-    try {
-        JSON.parse(translation); // check that the words can be parsed
-    } catch (e) {
-        const lines = translation.split(/\r\n|\n\r|\n/);
-        throw new Error('Cannot parse admin/words.js. Please fix. Probably it is a comma a the very last line: ...' + lines[lines.length - 2].substring(lines[lines.length - 2].length - 20));
-    }
-
-    fs.writeFileSync(__dirname + '/widgets/info/js/words.js', words);
-    done();
-});
-
-gulp.task('rename', done => {
-    const dirs = fs.readdirSync(__dirname + '/admin/i18n');
-    dirs.forEach(dir => {
-        if (fs.existsSync(__dirname + '/admin/i18n/' + dir + '/translations.json')) {
-            fs.writeFileSync(__dirname + '/admin/i18n/' + dir + '.json', fs.readFileSync(__dirname + '/admin/i18n/' + dir + '/translations.json'));
-            fs.unlinkSync(__dirname + '/admin/i18n/' + dir + '/translations.json');
-        }
-    });
-})
-
-gulp.task('default', gulp.series('copy'));
diff --git a/io-package.json b/io-package.json
index 519fab0..7e08ae5 100644
--- a/io-package.json
+++ b/io-package.json
@@ -210,16 +210,16 @@
             "zh-cn": "推倒"
         },
         "desc": {
-            "en": "This adapter allows to send pushover notifications from ioBroker",
-            "de": "Dieser Adapter ermöglicht das Senden von Pushover-Benachrichtigungen von ioBroker",
-            "ru": "Этот адаптер позволяет отправлять pushover-уведомления от ioBroker",
-            "pt": "Este adaptador permite enviar notificações pushover do ioBroker",
-            "nl": "Met deze adapter kunt u pushover-meldingen van ioBroker verzenden",
-            "fr": "Cet adaptateur permet d'envoyer des notifications de transfert depuis ioBroker",
-            "it": "Questo adattatore consente di inviare notifiche pushover da ioBroker",
-            "es": "Este adaptador permite enviar notificaciones fáciles de ioBroker",
-            "pl": "Ten adapter umożliwia wysyłanie powiadomień push z programu ioBroker",
-            "zh-cn": "此适配器允许从 ioBroker 发送推送通知"
+            "en": "Sends pushover notifications",
+            "de": "Sendet Pushover-Benachrichtigungen",
+            "ru": "Отправляет уведомления pushover",
+            "pt": "Envia notificações pushover",
+            "nl": "Stuurt een persverklaring",
+            "fr": "Envoyez des notifications pushover",
+            "it": "Invia notifiche pushover",
+            "es": "Enviar notificaciones de empuje",
+            "pl": "Powiadomienia",
+            "zh-cn": "发布通知"
         },
         "authors": [
             "bluefox <dogafox@gmail.com>"
diff --git a/lib/adapter-config.d.ts b/lib/adapter-config.d.ts
new file mode 100644
index 0000000..e0fbfa9
--- /dev/null
+++ b/lib/adapter-config.d.ts
@@ -0,0 +1,19 @@
+// This file extends the AdapterConfig type from "@types/iobroker"
+// using the actual properties present in io-package.json
+// in order to provide typings for adapter.config properties
+
+import { native } from '../io-package.json';
+
+type _AdapterConfig = typeof native;
+
+// Augment the globally declared type ioBroker.AdapterConfig
+declare global {
+    namespace ioBroker {
+        interface AdapterConfig extends _AdapterConfig {
+            // Do not enter anything here!
+        }
+    }
+}
+
+// this is required so the above AdapterConfig is found by TypeScript / type checking
+export {};
\ No newline at end of file
diff --git a/lib/tools.js b/lib/tools.js
deleted file mode 100644
index e6a208a..0000000
--- a/lib/tools.js
+++ /dev/null
@@ -1,80 +0,0 @@
-const axios = require('axios');
-
-/**
- * Tests whether the given variable is a real object and not an Array
- * @param {any} it The variable to test
- * @returns {it is Record<string, any>}
- */
-function isObject(it) {
-    // This is necessary because:
-    // typeof null === 'object'
-    // typeof [] === 'object'
-    // [] instanceof Object === true
-    return Object.prototype.toString.call(it) === '[object Object]';
-}
-
-/**
- * Tests whether the given variable is really an Array
- * @param {any} it The variable to test
- * @returns {it is any[]}
- */
-function isArray(it) {
-    if (Array.isArray != null) {
-        return Array.isArray(it);
-    }
-    return Object.prototype.toString.call(it) === '[object Array]';
-}
-
-/**
- * Translates text using the Google Translate API
- * @param {string} text The text to translate
- * @param {string} targetLang The target languate
- * @param {string} yandex api key
- * @returns {Promise<string>}
- */
-async function translateText(text, targetLang, yandex) {
-    if (targetLang === 'en') {
-        return text;
-    }
-    if (yandex) {
-        return await translateYandex(text, targetLang, yandex);
-    } else {
-        return await translateGoogle(text, targetLang);
-    }
-}
-
-async function translateYandex(text, targetLang, yandex) {
-    if (targetLang === 'zh-cn') {
-        targetLang = 'zh';
-    }
-    try {
-        const url = `https://translate.yandex.net/api/v1.5/tr.json/translate?key=${yandex}&text=${encodeURIComponent(text)}&lang=en-${targetLang}`;
-        const response = await axios({url, timeout: 15000});
-        if (response.data && response.data['text']) {
-            return response.data['text'][0];
-        }
-        throw new Error('Invalid response for translate request');
-    } catch (e) {
-        throw new Error(`Could not translate to "${targetLang}": ${e}`);
-    }
-}
-
-async function translateGoogle(text, targetLang) {
-    try {
-        const url = `http://translate.googleapis.com/translate_a/single?client=gtx&sl=en&tl=${targetLang}&dt=t&q=${encodeURIComponent(text)}&ie=UTF-8&oe=UTF-8`;
-        const response = await axios({url, timeout: 15000});
-        if (isArray(response.data)) {
-            // we got a valid response
-            return response.data[0][0][0];
-        }
-        throw new Error('Invalid response for translate request');
-    } catch (e) {
-        throw new Error(`Could not translate to "${targetLang}": ${e}`);
-    }
-}
-
-module.exports = {
-    isArray,
-    isObject,
-    translateText
-};
diff --git a/package.json b/package.json
index 920e6ee..6d02a91 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
     "name": "iobroker.pushover",
-    "description": "This adapter allows to send pushover notifications from ioBroker",
+    "description": "ioBroker Adapter to send pushover notifications",
     "version": "2.1.0",
     "author": "bluefox <dogafox@gmail.com>",
     "contributors": [
@@ -23,21 +23,50 @@
         "axios": "^0.27.2"
     },
     "devDependencies": {
-        "@alcalzone/release-script": "^2.2.2",
-        "gulp": "^4.0.2",
-        "mocha": "^9.2.2",
+        "@alcalzone/release-script": "^3.5.9",
+        "@alcalzone/release-script-plugin-iobroker": "^3.5.9",
+        "@alcalzone/release-script-plugin-license": "^3.5.9",
+        "@iobroker/adapter-dev": "^1.0.0",
+        "@iobroker/testing": "^3.0.2",
+        "@types/chai": "^4.3.1",
+        "@types/chai-as-promised": "^7.1.5",
+        "@types/mocha": "^9.1.1",
+        "@types/node": "^17.0.41",
+        "@types/proxyquire": "^1.3.28",
+        "@types/sinon": "^10.0.11",
+        "@types/sinon-chai": "^3.2.8",
"chai": "^4.3.6", - "@iobroker/testing": "^3.0.2" + "chai-as-promised": "^7.1.1", + "eslint": "^8.17.0", + "mocha": "^10.0.0", + "proxyquire": "^2.1.3", + "sinon": "^14.0.0", + "sinon-chai": "^3.7.0", + "typescript": "~4.7.3" }, "bugs": { "url": "https://github.com/ioBroker/ioBroker.pushover/issues" }, "main": "main.js", + "files": [ + "admin{,/!(src)/**}/!(tsconfig|tsconfig.*).json", + "admin{,/!(src)/**}/*.{html,css,png,svg,jpg,js}", + "lib/", + "www/", + "io-package.json", + "LICENSE", + "main.js" + ], "scripts": { - "test": "npm run test:package && npm run test:unit", - "test:package": "mocha test/package --exit", - "test:unit": "mocha test/unit --exit", - "test:integration": "mocha test/integration --exit", - "release": "release-script" + "test:js": "mocha --config test/mocharc.custom.json \"{!(node_modules|test)/**/*.test.js,*.test.js,test/**/test!(PackageFiles|Startup).js}\"", + "test:package": "mocha test/package --exit", + "test:integration": "mocha test/integration --exit", + "test": "npm run test:js && npm run test:package", + "check": "tsc --noEmit -p tsconfig.check.json", + "lint": "eslint --ext .js,.jsx", + "translate": "translate-adapter", + "release": "release-script patch --yes", + "release-minor": "release-script minor --yes", + "release-major": "release-script major --yes" } } \ No newline at end of file diff --git a/test/integration.js b/test/integration.js index 1b3453e..fa6db2e 100644 --- a/test/integration.js +++ b/test/integration.js @@ -2,4 +2,4 @@ const path = require('path'); const { tests } = require('@iobroker/testing'); // Run integration tests - See https://github.com/ioBroker/testing for a detailed explanation and further options -tests.integration(path.join(__dirname, '..')); +tests.integration(path.join(__dirname, '..')); \ No newline at end of file diff --git a/test/mocha.custom.opts b/test/mocha.custom.opts deleted file mode 100644 index 703f749..0000000 --- a/test/mocha.custom.opts +++ /dev/null @@ -1,2 +0,0 @@ ---require test/mocha.setup.js -{!(node_modules|test)/**/*.test.js,*.test.js,test/**/test!(PackageFiles|Startup).js} \ No newline at end of file diff --git a/test/mocharc.custom.json b/test/mocharc.custom.json new file mode 100644 index 0000000..2e317d5 --- /dev/null +++ b/test/mocharc.custom.json @@ -0,0 +1,10 @@ +{ + "require": [ + "test/mocha.setup.js" + ], + "watch-files": [ + "!(node_modules|test)/**/*.test.js", + "*.test.js", + "test/**/test!(PackageFiles|Startup).js" + ] +} \ No newline at end of file diff --git a/test/tsconfig.json b/test/tsconfig.json new file mode 100644 index 0000000..a2308c1 --- /dev/null +++ b/test/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "noImplicitAny": false + }, + "include": [ + "./**/*.js" + ] +} diff --git a/tsconfig.check.json b/tsconfig.check.json new file mode 100644 index 0000000..6c24629 --- /dev/null +++ b/tsconfig.check.json @@ -0,0 +1,14 @@ +// Specialized tsconfig for type-checking js files +{ + "extends": "./tsconfig.json", + "compilerOptions": {}, + "include": [ + "**/*.js", + "**/*.d.ts" + ], + "exclude": [ + "**/build", + "node_modules/", + "widgets/" + ] +} diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..226ea75 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,40 @@ +// Root tsconfig to set the settings and power editor support for all TS files +{ + "compileOnSave": true, + "compilerOptions": { + // do not compile anything, this file is just to configure type checking + "noEmit": true, + + // check JS files + "allowJs": 
true, + "checkJs": true, + + "module": "commonjs", + "moduleResolution": "node", + "esModuleInterop": true, + // this is necessary for the automatic typing of the adapter config + "resolveJsonModule": true, + + // Set this to false if you want to disable the very strict rules (not recommended) + "strict": true, + // Or enable some of those features for more fine-grained control + // "strictNullChecks": true, + // "strictPropertyInitialization": true, + // "strictBindCallApply": true, + "noImplicitAny": false, + // "noUnusedLocals": true, + // "noUnusedParameters": true, + "useUnknownInCatchVariables": false, + + // Consider targetting es2019 or higher if you only support Node.js 12+ + "target": "es2018", + + }, + "include": [ + "**/*.js", + "**/*.d.ts" + ], + "exclude": [ + "node_modules/**" + ] +} \ No newline at end of file
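
Note on the type-checking setup introduced above, for orientation only: lib/adapter-config.d.ts derives ioBroker.AdapterConfig from the "native" section of io-package.json, and the root tsconfig.json turns on "checkJs" and "resolveJsonModule", so `npm run check` (tsc --noEmit -p tsconfig.check.json) can validate plain JavaScript, including accesses to this.config. The following is a minimal hypothetical sketch of how that surfaces in adapter code; it is not part of this patch. The adapter-core class skeleton and the config property name "user" are assumptions for illustration, since neither main.js nor the native section of io-package.json appears in this diff.

'use strict';

// Hypothetical excerpt, for illustration only - not part of the repository.
const utils = require('@iobroker/adapter-core');

class Pushover extends utils.Adapter {
    constructor(options = {}) {
        super({ ...options, name: 'pushover' });
        this.on('ready', this.onReady.bind(this));
    }

    async onReady() {
        // With checkJs active, `this.config` is typed as ioBroker.AdapterConfig,
        // i.e. `typeof native` from io-package.json, so "npm run check" flags
        // misspelled properties such as `this.config.usr`.
        // The property name `user` below is an assumed example.
        this.log.info(`Pushover user configured: ${this.config.user ? 'yes' : 'no'}`);
    }
}

if (require.main !== module) {
    // Export the constructor in compact mode
    module.exports = (options) => new Pushover(options);
} else {
    // Otherwise start the instance directly
    new Pushover();
}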