From e7324c1068e061d377008cfde8a51c6a59139925 Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 08:00:57 +0530
Subject: [PATCH 01/17] Update and rename main.py to __init__.py
---
main.py => __init__.py | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
rename main.py => __init__.py (80%)
diff --git a/main.py b/__init__.py
similarity index 80%
rename from main.py
rename to __init__.py
index 6b45da5..0056a45 100644
--- a/main.py
+++ b/__init__.py
@@ -20,15 +20,15 @@ class MainWindow(QMainWindow):
back_btn.triggered.connect(self.browser.back)
navbar.addAction(back_btn)
- forward_btn = QAction('Forward', self)
+ forward_btn = QAction('®', self)
forward_btn.triggered.connect(self.browser.forward)
navbar.addAction(forward_btn)
- reload_btn = QAction('Reload', self)
+ reload_btn = QAction('✓', self)
reload_btn.triggered.connect(self.browser.reload)
navbar.addAction(reload_btn)
- home_btn = QAction('Home', self)
+ home_btn = QAction('_', self)
home_btn.triggered.connect(self.navigate_home)
navbar.addAction(home_btn)
@@ -39,7 +39,7 @@ class MainWindow(QMainWindow):
self.browser.urlChanged.connect(self.update_url)
def navigate_home(self):
- self.browser.setUrl(QUrl('http://programming-hero.com'))
+ self.browser.setUrl(QUrl('http://google.com'))
def navigate_to_url(self):
url = self.url_bar.text()
@@ -50,6 +50,6 @@ class MainWindow(QMainWindow):
app = QApplication(sys.argv)
-QApplication.setApplicationName('My Cool Browser')
+QApplication.setApplicationName('javes')
window = MainWindow()
-app.exec_()
\ No newline at end of file
+app.exec_()
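For orientation, here is a rough sketch of the full script these hunks modify, reconstructed from the visible context lines; it is not the committed file. The QLineEdit url-bar wiring and the body of update_url are assumptions (those lines are only partly shown), the button labels are readable placeholders rather than the exact strings of any one revision, and the home page / application name reflect the post-patch values. It needs the PyQt5 and PyQtWebEngine packages.
```
# Reconstruction sketch only -- not the committed __init__.py.
import sys
from PyQt5.QtCore import QUrl
from PyQt5.QtWidgets import QApplication, QMainWindow, QToolBar, QAction, QLineEdit
from PyQt5.QtWebEngineWidgets import QWebEngineView


class MainWindow(QMainWindow):
    def __init__(self):
        super(MainWindow, self).__init__()
        # Central web view
        self.browser = QWebEngineView()
        self.browser.setUrl(QUrl('http://google.com'))
        self.setCentralWidget(self.browser)
        self.showMaximized()

        # Navigation toolbar
        navbar = QToolBar()
        self.addToolBar(navbar)

        back_btn = QAction('Back', self)
        back_btn.triggered.connect(self.browser.back)
        navbar.addAction(back_btn)

        forward_btn = QAction('Forward', self)
        forward_btn.triggered.connect(self.browser.forward)
        navbar.addAction(forward_btn)

        reload_btn = QAction('Reload', self)
        reload_btn.triggered.connect(self.browser.reload)
        navbar.addAction(reload_btn)

        home_btn = QAction('Home', self)
        home_btn.triggered.connect(self.navigate_home)
        navbar.addAction(home_btn)

        # URL bar (assumed wiring, based on the navigate_to_url/update_url names)
        self.url_bar = QLineEdit()
        self.url_bar.returnPressed.connect(self.navigate_to_url)
        navbar.addWidget(self.url_bar)

        self.browser.urlChanged.connect(self.update_url)

    def navigate_home(self):
        self.browser.setUrl(QUrl('http://google.com'))

    def navigate_to_url(self):
        url = self.url_bar.text()
        self.browser.setUrl(QUrl(url))

    def update_url(self, q):
        self.url_bar.setText(q.toString())


app = QApplication(sys.argv)
QApplication.setApplicationName('javes')
window = MainWindow()
app.exec_()
```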
From a73b1d07711bbcff9fc540a14e314841605eb614 Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 08:01:55 +0530
Subject: [PATCH 02/17] Create README.md
---
README.md | 5 +++++
1 file changed, 5 insertions(+)
create mode 100644 README.md
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..400fa3f
--- /dev/null
+++ b/README.md
@@ -0,0 +1,5 @@
+# a new powerful browser
+
+
+
+This is open source software for all
From 22cda86733e7822c44f6a3f82040eab65db5d7cf Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 08:05:39 +0530
Subject: [PATCH 03/17] Update __init__.py
---
__init__.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/__init__.py b/__init__.py
index 0056a45..b0d9d53 100644
--- a/__init__.py
+++ b/__init__.py
@@ -20,15 +20,15 @@ class MainWindow(QMainWindow):
back_btn.triggered.connect(self.browser.back)
navbar.addAction(back_btn)
- forward_btn = QAction('®', self)
+ forward_btn = QAction('forward', self)
forward_btn.triggered.connect(self.browser.forward)
navbar.addAction(forward_btn)
- reload_btn = QAction('✓', self)
+ reload_btn = QAction('reload', self)
reload_btn.triggered.connect(self.browser.reload)
navbar.addAction(reload_btn)
- home_btn = QAction('_', self)
+ home_btn = QAction('home', self)
home_btn.triggered.connect(self.navigate_home)
navbar.addAction(home_btn)
From 0dd3229cd7908d37ec71e0a16f52d6101aacf493 Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 08:07:45 +0530
Subject: [PATCH 04/17] Update readme.md
---
README.md | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/README.md b/README.md
index 400fa3f..e06f2f0 100644
--- a/README.md
+++ b/README.md
@@ -3,3 +3,8 @@
This is open source software for all
+
+
+
+
+https://telegra.ph/file/08e36f146292a4dc484d2.jpg
From ff2de68da2c28ee0ffb7abb5791e3c88e2f45b0d Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 10:51:01 +0530
Subject: [PATCH 05/17] Update
---
README.md | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index e06f2f0..f338a28 100644
--- a/README.md
+++ b/README.md
@@ -7,4 +7,5 @@ This is open source software for all
-https://telegra.ph/file/08e36f146292a4dc484d2.jpg
+!browser[https://telegra.ph/file/08e36f146292a4dc484d2.jpg
+]
From 84c89078fe15d356d2b65f1d2473a2d86ac1d201 Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 10:52:29 +0530
Subject: [PATCH 06/17] Update
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index f338a28..7fdceca 100644
--- a/README.md
+++ b/README.md
@@ -7,5 +7,5 @@ This is open source software for all
-!browser[https://telegra.ph/file/08e36f146292a4dc484d2.jpg
+![browser][https://telegra.ph/file/08e36f146292a4dc484d2.jpg
]
From 86cb07563658754351bbda2bbaa44a97927a7dad Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 10:53:04 +0530
Subject: [PATCH 07/17] Update README.md
---
README.md | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/README.md b/README.md
index 7fdceca..ccfc0ba 100644
--- a/README.md
+++ b/README.md
@@ -1,11 +1,12 @@
+![browser][https://telegra.ph/file/08e36f146292a4dc484d2.jpg
+]
+
+
+
# a new powerful browser
-
-
This is open source software for all
-![browser][https://telegra.ph/file/08e36f146292a4dc484d2.jpg
-]
From e9b4908a2ac769b9b36793178087b953a6e73cdf Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 10:54:53 +0530
Subject: [PATCH 08/17] Update README.md
---
README.md | 209 ++++++++++++++++++++++++++++++++++++++++++++++++++++--
1 file changed, 205 insertions(+), 4 deletions(-)
diff --git a/README.md b/README.md
index ccfc0ba..dceb199 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,213 @@
-![browser][https://telegra.ph/file/08e36f146292a4dc484d2.jpg
-]
+![JARVIS](https://telegra.ph/file/4cbc2ebe249e4c614a102.jpg)
+# JARVISBOT
+[](https://app.codacy.com/gh/AnimeKaizoku/SaitamaRobot?utm_source=github.com&utm_medium=referral&utm_content=AnimeKaizoku/SaitamaRobot&utm_campaign=Badge_Grade_Settings) [](https://GitHub.com/Naereen/StrapDown.js/graphs/commit-activity) [](https://perso.crans.org/besson/LICENSE.html) [](https://github.com/ellerbrock/open-source-badges/) [](https://makeapullrequest.com) [](https://t.me/IGRIS_BOT1)
+
+
+A modular Telegram Python bot running on python3 with a sqlalchemy database.
+
+Originally a Marie fork, IGRIS evolved further and was built to be more robust and more dedicated to Anime Chats.
+
+Can be found on telegram as [JARVIS](https://t.me/darklord07_bot).
+
+The support group can be reached at [SOLO•GUILD](https://t.me/JARVISUPPORT), where you can ask for help with [JARVIS](https://t.me/JARVISUPPORT), discover/request new features, report bugs, and stay in the loop whenever a new update is available.
-# a new powerful browser
+## How to setup/deploy.
-This is open source software for all
+### Read these notes carefully before proceeding
+
+ - Your code must be open source and a link to your fork's repository must be there in the start reply of the bot. [See this](https://github.com/HeLLxGodLike/IGRIS_BOT/blob/shiken/SaitamaRobot/__main__.py#L25)
+ - Lastly, if you are found to be running this repo without the code being open sourced, or without the repository link mentioned in the bot, we will push a gban for you in our network for violating the license. You are free to be a dick and not respect the open source code (we do not mind), but we will not be having you around our chats.
+
+ Steps to deploy on Heroku !!
+
+```
+Fill in all the details, Deploy!
+Now go to https://dashboard.heroku.com/apps/(app-name)/resources ( Replace (app-name) with your app name )
+Turn on worker dyno (Don't worry It's free :D) & Webhook
+Now send the bot /start, If it doesn't respond go to https://dashboard.heroku.com/apps/(app-name)/settings and remove webhook and port.
+```
+
+ [](https://heroku.com/deploy?template=https://github.com/zeuslord224/JARVISnomore)
+
+
+
+ Steps to self Host!!
+
+Note: This instruction set is just a copy-paste from Marie. [One Punch Support](https://t.me/OnePunchSupport) aims to handle support for @SaitamaRobot, not how to set up your own fork. If you find this bit confusing or tough to understand, we recommend you ask a dev; kindly avoid asking how to set up the bot instance in the support chat, which is meant to help our own instance of the bot and not the forks.
+
+ ## Setting up the bot (Read this before trying to use!):
+Please make sure to use python3.6, as I cannot guarantee everything will work as expected on older Python versions!
+This is because markdown parsing is done by iterating through a dict, which is ordered by default in 3.6.
+
+ ### Configuration
+
+There are two possible ways of configuring your bot: a config.py file, or ENV variables.
+
+The preferred version is to use a `config.py` file, as it makes it easier to see all your settings grouped together.
+This file should be placed in your `SaitamaRobot` folder, alongside the `__main__.py` file.
+This is where your bot token will be loaded from, as well as your database URI (if you're using a database), and most of
+your other settings.
+
+It is recommended to import sample_config and extend the Config class, as this will ensure your config contains all
+defaults set in the sample_config, hence making it easier to upgrade.
+
+An example `config.py` file could be:
+```
+from SaitamaRobot.sample_config import Config
+
+class Development(Config):
+ OWNER_ID = 254318997 # your telegram ID
+ OWNER_USERNAME = "SonOfLars" # your telegram username
+ API_KEY = "your bot api key" # your api key, as provided by the @botfather
+ SQLALCHEMY_DATABASE_URI = 'postgresql://username:password@localhost:5432/database' # sample db credentials
+ JOIN_LOGGER = '-1234567890' # some group chat that your bot is a member of
+ USE_JOIN_LOGGER = True
+ SUDO_USERS = [18673980, 83489514] # List of id's for users which have sudo access to the bot.
+ LOAD = []
+ NO_LOAD = ['translation']
+```
+
+If you can't have a config.py file (EG on Heroku), it is also possible to use environment variables.
+The following env variables are supported:
+ - `ENV`: Setting this to ANYTHING will enable env variables
+
+ - `TOKEN`: Your bot token, as a string.
+ - `OWNER_ID`: An integer consisting of your owner ID
+ - `OWNER_USERNAME`: Your username
+
+ - `DATABASE_URL`: Your database URL
+ - `JOIN_LOGGER`: optional: a chat where your replied saved messages are stored, to stop people deleting their old messages
+ - `LOAD`: Space-separated list of modules you would like to load
+ - `NO_LOAD`: Space-separated list of modules you would like NOT to load
+ - `WEBHOOK`: Setting this to ANYTHING will enable webhooks when in env mode
+ - `URL`: The URL your webhook should connect to (only needed for webhook mode)
+
+ - `SUDO_USERS`: A space-separated list of user_ids which should be considered sudo users
+ - `SUPPORT_USERS`: A space-separated list of user_ids which should be considered support users (can gban/ungban,
+ nothing else)
+ - `WHITELIST_USERS`: A space-separated list of user_ids which should be considered whitelisted - they can't be banned.
+ - `DONATION_LINK`: Optional: link where you would like to receive donations.
+ - `CERT_PATH`: Path to your webhook certificate
+ - `PORT`: Port to use for your webhooks
+ - `DEL_CMDS`: Whether to delete commands from users which don't have rights to use that command
+ - `STRICT_GBAN`: Enforce gbans across new groups as well as old groups. When a gbanned user talks, he will be banned.
+ - `WORKERS`: Number of threads to use. 8 is the recommended (and default) amount, but your experience may vary.
+ __Note__ that going crazy with more threads won't necessarily speed up your bot, given the large amount of SQL data
+ accesses, and the way python asynchronous calls work.
+ - `BAN_STICKER`: Which sticker to use when banning people.
+ - `ALLOW_EXCL`: Whether to allow using exclamation marks ! for commands as well as /.
+
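As a rough illustration of the environment-variable route described in the list above, the variables could be mapped onto a config object along these lines. This is not the repo's actual loader; the class and helper names are invented for the sketch.
```
# Illustrative only: one way to read the env variables listed above.
import os


def env_list(name):
    # Space-separated lists such as SUDO_USERS / LOAD / NO_LOAD
    return os.environ.get(name, "").split()


class EnvConfig:
    API_KEY = os.environ.get("TOKEN")
    OWNER_ID = int(os.environ.get("OWNER_ID", 0))
    OWNER_USERNAME = os.environ.get("OWNER_USERNAME")
    SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URL")
    SUDO_USERS = [int(x) for x in env_list("SUDO_USERS")]
    LOAD = env_list("LOAD")
    NO_LOAD = env_list("NO_LOAD")
    WEBHOOK = bool(os.environ.get("WEBHOOK"))
    DEL_CMDS = bool(os.environ.get("DEL_CMDS"))
    WORKERS = int(os.environ.get("WORKERS", 8))
```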
+ ### Python dependencies
+
+Install the necessary Python dependencies by moving to the project directory and running:
+
+`pip3 install -r requirements.txt`.
+
+This will install all the necessary python packages.
+
+ ### Database
+
+If you wish to use a database-dependent module (eg: locks, notes, userinfo, users, filters, welcomes),
+you'll need to have a database installed on your system. I use Postgres, so I recommend using it for optimal compatibility.
+
+In the case of Postgres, this is how you would set up a database on a Debian/ubuntu system. Other distributions may vary.
+
+- install postgresql:
+
+`sudo apt-get update && sudo apt-get install postgresql`
+
+- change to the Postgres user:
+
+`sudo su - postgres`
+
+- create a new database user (change YOUR_USER appropriately):
+
+`createuser -P -s -e YOUR_USER`
+
+You will then be prompted to enter your password.
+
+- create a new database table:
+
+`createdb -O YOUR_USER YOUR_DB_NAME`
+
+Change YOUR_USER and YOUR_DB_NAME appropriately.
+
+- finally:
+
+`psql YOUR_DB_NAME -h YOUR_HOST YOUR_USER`
+
+This will allow you to connect to your database via your terminal.
+By default, YOUR_HOST should be 0.0.0.0:5432.
+
+You should now be able to build your database URI. This will be:
+
+`sqldbtype://username:pw@hostname:port/db_name`
+
+Replace sqldbtype with whichever DB you're using (eg Postgres, MySQL, SQLite, etc)
+repeat for your username, password, hostname (localhost?), port (5432?), and DB name.
+
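For a concrete example of such a URI, here is a small, hypothetical check that the database is reachable via SQLAlchemy; the credentials below are placeholders, not anything from this repo.
```
# Placeholder credentials; substitute your own username, password, host, port and db name.
# Requires: pip3 install sqlalchemy psycopg2-binary (for a Postgres URI)
from sqlalchemy import create_engine, text

SQLALCHEMY_DATABASE_URI = "postgresql://marie:hunter2@localhost:5432/mariedb"

engine = create_engine(SQLALCHEMY_DATABASE_URI)
with engine.connect() as conn:
    # Simple round-trip to confirm the URI works
    print("connected:", conn.execute(text("SELECT 1")).scalar())
```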
+ ## Modules
+ ### Setting load order.
+
+The module load order can be changed via the `LOAD` and `NO_LOAD` configuration settings.
+These should both represent lists.
+
+If `LOAD` is an empty list, all modules in `modules/` will be selected for loading by default.
+
+If `NO_LOAD` is not present or is an empty list, all modules selected for loading will be loaded.
+
+If a module is in both `LOAD` and `NO_LOAD`, the module will not be loaded - `NO_LOAD` takes priority.
+
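The selection rules above can be summarised in a few lines; this is an illustrative sketch, not the repo's actual loader code.
```
# Sketch of the LOAD/NO_LOAD resolution described above; illustrative only.
def resolve_modules(all_modules, load, no_load):
    # Empty LOAD means "load everything in modules/"
    selected = list(all_modules) if not load else [m for m in all_modules if m in load]
    # NO_LOAD always wins, even if the module is also listed in LOAD
    return [m for m in selected if m not in no_load]


print(resolve_modules(["notes", "locks", "translation"], [], ["translation"]))
# -> ['notes', 'locks']
```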
+ ### Creating your own modules.
+
+Creating a module has been simplified as much as possible - but do not hesitate to suggest further simplification.
+
+All that is needed is that your .py file is in the modules folder.
+
+To add commands, make sure to import the dispatcher via
+
+`from SaitamaRobot import dispatcher`.
+
+You can then add commands using the usual
+
+`dispatcher.add_handler()`.
+
+Assigning the `__help__` variable to a string describing this module's available
+commands will allow the bot to load it and add the documentation for
+your module to the `/help` command. Setting the `__mod_name__` variable will also allow you to use a nicer, user-friendly name for a module.
+
+The `__migrate__()` function is used for migrating chats - when a chat is upgraded to a supergroup, the ID changes, so
+it is necessary to migrate it in the DB.
+
+The `__stats__()` function is for retrieving module statistics, eg number of users, number of chats. This is accessed
+through the `/stats` command, which is only available to the bot owner.
+
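Putting those conventions together, a bare-bones module might look like the sketch below; the /ping command and handler name are invented for illustration, and the real modules in the repo are more involved.
```
# Hypothetical minimal module following the conventions above;
# place the .py file in the modules folder.
from telegram.ext import CommandHandler

from SaitamaRobot import dispatcher


def ping(update, context):
    # Reply to /ping so you can check the bot is alive
    update.effective_message.reply_text("pong")


PING_HANDLER = CommandHandler("ping", ping)
dispatcher.add_handler(PING_HANDLER)

__help__ = """
 - /ping: check that the bot is alive.
"""
__mod_name__ = "Ping"
```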
+## Starting the bot.
+
+Once you've set up your database and your configuration is complete, simply run the bat file(if on windows) or run (Linux):
+
+`python3 -m IGRISROBOT`
+
+You can use [nssm](https://nssm.cc/usage) to install the bot as service on windows and set it to restart on /gitpull
+Make sure to edit the start and restart bats to your needs.
+Note: the restart bat requires that User account control be disabled.
+
+For queries or any issues regarding the bot please open an issue ticket or visit us at [IGRIS BOT SUPPORT](https://t.me/IGRISBOTSUPPORT)
+## How to setup on Heroku
+For starters click on this button
+
+[](https://heroku.com/deploy?template=https://github.com/HeLLxGodLike/IGRISROBOT.git)
+## Credits
+The bot is based on the original work done by [PaulSonOfLars](https://github.com/PaulSonOfLars)
+This repo was just revamped to suit an Anime-centric community. All original credits go to Paul and his dedication, Without his efforts, this fork would not have been possible!
+
+Any other authorship/credits can be seen through the commits.
+
+Should any be missing kindly let us know at [IGRIS BOT SUPPORT](https://t.me/IGRISBOTSUPPORT) or simply submit a pull request on the readme.
From 7599f73455649d4232fa9de4929fbdca40d31ff9 Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 10:57:44 +0530
Subject: [PATCH 09/17] Update README.md
---
README.md | 215 ++----------------------------------------------------
1 file changed, 6 insertions(+), 209 deletions(-)
diff --git a/README.md b/README.md
index dceb199..0e37802 100644
--- a/README.md
+++ b/README.md
@@ -1,213 +1,10 @@
-![JARVIS](https://telegra.ph/file/4cbc2ebe249e4c614a102.jpg)
-# JARVISBOT
-[](https://app.codacy.com/gh/AnimeKaizoku/SaitamaRobot?utm_source=github.com&utm_medium=referral&utm_content=AnimeKaizoku/SaitamaRobot&utm_campaign=Badge_Grade_Settings) [](https://GitHub.com/Naereen/StrapDown.js/graphs/commit-activity) [](https://perso.crans.org/besson/LICENSE.html) [](https://github.com/ellerbrock/open-source-badges/) [](https://makeapullrequest.com) [](https://t.me/IGRIS_BOT1)
+![BROWSER][https://telegra.ph/file/08e36f146292a4dc484d2.jpg
+]
+# browser info
+A powerful browser coded in Python with the PyQt5 module
-A modular Telegram Python bot running on python3 with a sqlalchemy database.
-Originally a Marie fork, IGRIS evolved further and was built to be more robust and more dedicated to Anime Chats.
-Can be found on telegram as [JARVIS](https://t.me/darklord07_bot).
-
-The support group can be reached at [SOLO•GUILD](https://t.me/JARVISUPPORT), where you can ask for help with [JARVIS](https://t.me/JARVISUPPORT), discover/request new features, report bugs, and stay in the loop whenever a new update is available.
-
-
-
-## How to setup/deploy.
-
-### Read these notes carefully before proceeding
-
- - Your code must be open source and a link to your fork's repository must be there in the start reply of the bot. [See this](https://github.com/HeLLxGodLike/IGRIS_BOT/blob/shiken/SaitamaRobot/__main__.py#L25)
- Lastly, if you are found to be running this repo without the code being open sourced, or without the repository link mentioned in the bot, we will push a gban for you in our network for violating the license. You are free to be a dick and not respect the open source code (we do not mind), but we will not be having you around our chats.
-
-
-
- Steps to deploy on Heroku !!
-
-```
-Fill in all the details, Deploy!
-Now go to https://dashboard.heroku.com/apps/(app-name)/resources ( Replace (app-name) with your app name )
-Turn on worker dyno (Don't worry It's free :D) & Webhook
-Now send the bot /start, If it doesn't respond go to https://dashboard.heroku.com/apps/(app-name)/settings and remove webhook and port.
-```
-
- [](https://heroku.com/deploy?template=https://github.com/zeuslord224/JARVISnomore)
-
-
-
- Steps to self Host!!
-
-Note: This instruction set is just a copy-paste from Marie. [One Punch Support](https://t.me/OnePunchSupport) aims to handle support for @SaitamaRobot, not how to set up your own fork. If you find this bit confusing or tough to understand, we recommend you ask a dev; kindly avoid asking how to set up the bot instance in the support chat, which is meant to help our own instance of the bot and not the forks.
-
- ## Setting up the bot (Read this before trying to use!):
-Please make sure to use python3.6, as I cannot guarantee everything will work as expected on older Python versions!
-This is because markdown parsing is done by iterating through a dict, which is ordered by default in 3.6.
-
- ### Configuration
-
-There are two possible ways of configuring your bot: a config.py file, or ENV variables.
-
-The preferred version is to use a `config.py` file, as it makes it easier to see all your settings grouped together.
-This file should be placed in your `SaitamaRobot` folder, alongside the `__main__.py` file.
-This is where your bot token will be loaded from, as well as your database URI (if you're using a database), and most of
-your other settings.
-
-It is recommended to import sample_config and extend the Config class, as this will ensure your config contains all
-defaults set in the sample_config, hence making it easier to upgrade.
-
-An example `config.py` file could be:
-```
-from SaitamaRobot.sample_config import Config
-
-class Development(Config):
- OWNER_ID = 254318997 # your telegram ID
- OWNER_USERNAME = "SonOfLars" # your telegram username
- API_KEY = "your bot api key" # your api key, as provided by the @botfather
- SQLALCHEMY_DATABASE_URI = 'postgresql://username:password@localhost:5432/database' # sample db credentials
- JOIN_LOGGER = '-1234567890' # some group chat that your bot is a member of
- USE_JOIN_LOGGER = True
- SUDO_USERS = [18673980, 83489514] # List of id's for users which have sudo access to the bot.
- LOAD = []
- NO_LOAD = ['translation']
-```
-
-If you can't have a config.py file (EG on Heroku), it is also possible to use environment variables.
-The following env variables are supported:
- - `ENV`: Setting this to ANYTHING will enable env variables
-
- - `TOKEN`: Your bot token, as a string.
- `OWNER_ID`: An integer consisting of your owner ID
- - `OWNER_USERNAME`: Your username
-
- - `DATABASE_URL`: Your database URL
- `JOIN_LOGGER`: optional: a chat where your replied saved messages are stored, to stop people deleting their old messages
- - `LOAD`: Space-separated list of modules you would like to load
- - `NO_LOAD`: Space-separated list of modules you would like NOT to load
- - `WEBHOOK`: Setting this to ANYTHING will enable webhooks when in env mode
- - `URL`: The URL your webhook should connect to (only needed for webhook mode)
-
- - `SUDO_USERS`: A space-separated list of user_ids which should be considered sudo users
- - `SUPPORT_USERS`: A space-separated list of user_ids which should be considered support users (can gban/ungban,
- nothing else)
- - `WHITELIST_USERS`: A space-separated list of user_ids which should be considered whitelisted - they can't be banned.
- - `DONATION_LINK`: Optional: link where you would like to receive donations.
- - `CERT_PATH`: Path to your webhook certificate
- - `PORT`: Port to use for your webhooks
- - `DEL_CMDS`: Whether to delete commands from users which don't have rights to use that command
- - `STRICT_GBAN`: Enforce gbans across new groups as well as old groups. When a gbanned user talks, he will be banned.
- - `WORKERS`: Number of threads to use. 8 is the recommended (and default) amount, but your experience may vary.
- __Note__ that going crazy with more threads won't necessarily speed up your bot, given the large amount of SQL data
- accesses, and the way python asynchronous calls work.
- - `BAN_STICKER`: Which sticker to use when banning people.
- - `ALLOW_EXCL`: Whether to allow using exclamation marks ! for commands as well as /.
-
- ### Python dependencies
-
-Install the necessary Python dependencies by moving to the project directory and running:
-
-`pip3 install -r requirements.txt`.
-
-This will install all the necessary python packages.
-
- ### Database
-
-If you wish to use a database-dependent module (eg: locks, notes, userinfo, users, filters, welcomes),
-you'll need to have a database installed on your system. I use Postgres, so I recommend using it for optimal compatibility.
-
-In the case of Postgres, this is how you would set up a database on a Debian/ubuntu system. Other distributions may vary.
-
-- install postgresql:
-
-`sudo apt-get update && sudo apt-get install postgresql`
-
-- change to the Postgres user:
-
-`sudo su - postgres`
-
-- create a new database user (change YOUR_USER appropriately):
-
-`createuser -P -s -e YOUR_USER`
-
-You will then be prompted to enter your password.
-
-- create a new database table:
-
-`createdb -O YOUR_USER YOUR_DB_NAME`
-
-Change YOUR_USER and YOUR_DB_NAME appropriately.
-
-- finally:
-
-`psql YOUR_DB_NAME -h YOUR_HOST YOUR_USER`
-
-This will allow you to connect to your database via your terminal.
-By default, YOUR_HOST should be 0.0.0.0:5432.
-
-You should now be able to build your database URI. This will be:
-
-`sqldbtype://username:pw@hostname:port/db_name`
-
-Replace sqldbtype with whichever DB you're using (eg Postgres, MySQL, SQLite, etc)
-repeat for your username, password, hostname (localhost?), port (5432?), and DB name.
-
- ## Modules
- ### Setting load order.
-
-The module load order can be changed via the `LOAD` and `NO_LOAD` configuration settings.
-These should both represent lists.
-
-If `LOAD` is an empty list, all modules in `modules/` will be selected for loading by default.
-
-If `NO_LOAD` is not present or is an empty list, all modules selected for loading will be loaded.
-
-If a module is in both `LOAD` and `NO_LOAD`, the module will not be loaded - `NO_LOAD` takes priority.
-
- ### Creating your own modules.
-
-Creating a module has been simplified as much as possible - but do not hesitate to suggest further simplification.
-
-All that is needed is that your .py file is in the modules folder.
-
-To add commands, make sure to import the dispatcher via
-
-`from SaitamaRobot import dispatcher`.
-
-You can then add commands using the usual
-
-`dispatcher.add_handler()`.
-
-Assigning the `__help__` variable to a string describing this module's available
-commands will allow the bot to load it and add the documentation for
-your module to the `/help` command. Setting the `__mod_name__` variable will also allow you to use a nicer, user-friendly name for a module.
-
-The `__migrate__()` function is used for migrating chats - when a chat is upgraded to a supergroup, the ID changes, so
-it is necessary to migrate it in the DB.
-
-The `__stats__()` function is for retrieving module statistics, eg number of users, number of chats. This is accessed
-through the `/stats` command, which is only available to the bot owner.
-
-## Starting the bot.
-
-Once you've set up your database and your configuration is complete, simply run the bat file(if on windows) or run (Linux):
-
-`python3 -m IGRISROBOT`
-
-You can use [nssm](https://nssm.cc/usage) to install the bot as service on windows and set it to restart on /gitpull
-Make sure to edit the start and restart bats to your needs.
-Note: the restart bat requires that User account control be disabled.
-
-For queries or any issues regarding the bot please open an issue ticket or visit us at [IGRIS BOT SUPPORT](https://t.me/IGRISBOTSUPPORT)
-## How to setup on Heroku
-For starters click on this button
-
-[](https://heroku.com/deploy?template=https://github.com/HeLLxGodLike/IGRISROBOT.git)
-
-
-## Credits
-The bot is based on the original work done by [PaulSonOfLars](https://github.com/PaulSonOfLars)
-This repo was just revamped to suit an Anime-centric community. All original credits go to Paul and his dedication, Without his efforts, this fork would not have been possible!
-
-Any other authorship/credits can be seen through the commits.
-
-Should any be missing kindly let us know at [IGRIS BOT SUPPORT](https://t.me/IGRISBOTSUPPORT) or simply submit a pull request on the readme.
+## Code credit
+ProgrammingHero1
From bedc50985cc9798be55831b78e5492e4fd2be6a3 Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 10:59:37 +0530
Subject: [PATCH 10/17] Update README.md
---
README.md | 9 ++-------
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/README.md b/README.md
index 0e37802..91c43bc 100644
--- a/README.md
+++ b/README.md
@@ -1,10 +1,5 @@
![BROWSER][https://telegra.ph/file/08e36f146292a4dc484d2.jpg
]
-# browser info
-A powerful browser coded in Python with the PyQt5 module
-
-
-
-## Code credit
-ProgrammingHero1
+# powerful browser
+It's better
From 92529233ce38a245b12e11ce94990208c6a11840 Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 11:00:18 +0530
Subject: [PATCH 11/17] Update README.md
---
README.md | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/README.md b/README.md
index 91c43bc..c489bd8 100644
--- a/README.md
+++ b/README.md
@@ -2,4 +2,9 @@
]
# powerful browser
+
+
+
+
+
It's better
From 1072d4ecd036162f9a77006863e1cb199efe0302 Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 11:00:49 +0530
Subject: [PATCH 12/17] Delete README.md
---
README.md | 10 ----------
1 file changed, 10 deletions(-)
delete mode 100644 README.md
diff --git a/README.md b/README.md
deleted file mode 100644
index c489bd8..0000000
--- a/README.md
+++ /dev/null
@@ -1,10 +0,0 @@
-![BROWSER][https://telegra.ph/file/08e36f146292a4dc484d2.jpg
-]
-
-# powerful browser
-
-
-
-
-
-It's better
From 957ee28782b93b8d71130e4adbc663970a5cb81d Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 11:02:13 +0530
Subject: [PATCH 13/17] Create README.md
---
README.md | 9 +++++++++
1 file changed, 9 insertions(+)
create mode 100644 README.md
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..af9b109
--- /dev/null
+++ b/README.md
@@ -0,0 +1,9 @@
+# my_browser
+
+![browser][https://telegra.ph/file/08e36f146292a4dc484d2.jpg
+]
+
+## browser_py
+
+
+It's a better form of the web
From 01773dbb10869698575d3e027a2fcc8a18f05c8d Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 11:03:52 +0530
Subject: [PATCH 14/17] Update __init__.py
---
__init__.py | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/__init__.py b/__init__.py
index b0d9d53..811bf3d 100644
--- a/__init__.py
+++ b/__init__.py
@@ -1,9 +1,11 @@
+# Import everything required
+
import sys
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5.QtWebEngineWidgets import *
-
+# Main browser window
class MainWindow(QMainWindow):
def __init__(self):
super(MainWindow, self).__init__()
@@ -12,7 +14,7 @@ class MainWindow(QMainWindow):
self.setCentralWidget(self.browser)
self.showMaximized()
- # navbar
+ # navigation bar
navbar = QToolBar()
self.addToolBar(navbar)
From 14c8cb04c428d92a2dfb971b79e8c397b8086aa6 Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 11:17:30 +0530
Subject: [PATCH 15/17] Update README.md
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index af9b109..e4cedfb 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
# my_browser
-![browser][https://telegra.ph/file/08e36f146292a4dc484d2.jpg
-]
+[![browser][https://telegra.ph/file/08e36f146292a4dc484d2.jpg
+]]
## browser_py
From ce355791dc44b8d4ece3760c11bcf2c6f2456a9e Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 11:19:15 +0530
Subject: [PATCH 16/17] Update README.md
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index e4cedfb..68bd800 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
# my_browser
[![browser][https://telegra.ph/file/08e36f146292a4dc484d2.jpg
-]]
+]](https://google.com)
## browser_py
From 179c049fd33f57fcf8a7f63c401e19d746b7f897 Mon Sep 17 00:00:00 2001
From: jeevin1807 <81909169+jeevin1807@users.noreply.github.com>
Date: Sat, 10 Apr 2021 11:24:59 +0530
Subject: [PATCH 17/17] Create config.yml
---
config.yml | 2851 ++++++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 2851 insertions(+)
create mode 100644 config.yml
diff --git a/config.yml b/config.yml
new file mode 100644
index 0000000..20ad88e
--- /dev/null
+++ b/config.yml
@@ -0,0 +1,2851 @@
+version: 2.1
+
+parameters:
+ upload-to-s3:
+ type: string
+ default: '1'
+
+ run-lint:
+ type: boolean
+ default: true
+
+ run-build-linux:
+ type: boolean
+ default: true
+
+ run-build-mac:
+ type: boolean
+ default: true
+
+ run-linux-x64-publish:
+ type: boolean
+ default: false
+
+ run-linux-ia32-publish:
+ type: boolean
+ default: false
+
+ run-linux-arm-publish:
+ type: boolean
+ default: false
+
+ run-linux-arm64-publish:
+ type: boolean
+ default: false
+
+ run-osx-publish:
+ type: boolean
+ default: false
+
+ run-osx-publish-arm64:
+ type: boolean
+ default: false
+
+ run-mas-publish:
+ type: boolean
+ default: false
+
+ run-mas-publish-arm64:
+ type: boolean
+ default: false
+
+ run-linux-publish:
+ type: boolean
+ default: false
+
+ run-macos-publish:
+ type: boolean
+ default: false
+
+# The config expects the following environment variables to be set:
+# - "SLACK_WEBHOOK" Slack hook URL to send notifications.
+#
+# The publishing scripts expect access tokens to be defined as env vars,
+# but those are not covered here.
+#
+# CircleCI docs on variables:
+# https://circleci.com/docs/2.0/env-vars/
+
+# Build machines configs.
+docker-image: &docker-image
+ docker:
+ - image: electron.azurecr.io/build:6555a80939fb4c3ddf9343b3f140e573f40de225
+
+machine-linux-medium: &machine-linux-medium
+ <<: *docker-image
+ resource_class: medium
+
+machine-linux-xlarge: &machine-linux-xlarge
+ <<: *docker-image
+ resource_class: xlarge
+
+machine-linux-2xlarge: &machine-linux-2xlarge
+ <<: *docker-image
+ resource_class: 2xlarge+
+
+machine-mac: &machine-mac
+ macos:
+ xcode: "12.4.0"
+
+machine-mac-large: &machine-mac-large
+ resource_class: large
+ macos:
+ xcode: "12.4.0"
+
+machine-mac-large-arm: &machine-mac-large-arm
+ resource_class: large
+ macos:
+ xcode: "12.4.0"
+
+machine-mac-arm64: &machine-mac-arm64
+ resource_class: electronjs/macos-arm64
+ machine: true
+
+# Build configurations options.
+env-testing-build: &env-testing-build
+ GN_CONFIG: //electron/build/args/testing.gn
+ CHECK_DIST_MANIFEST: '1'
+
+env-release-build: &env-release-build
+ GN_CONFIG: //electron/build/args/release.gn
+ STRIP_BINARIES: true
+ GENERATE_SYMBOLS: true
+ CHECK_DIST_MANIFEST: '1'
+ IS_RELEASE: true
+
+env-headless-testing: &env-headless-testing
+ DISPLAY: ':99.0'
+
+env-stack-dumping: &env-stack-dumping
+ ELECTRON_ENABLE_STACK_DUMPING: '1'
+
+env-browsertests: &env-browsertests
+ GN_CONFIG: //electron/build/args/native_tests.gn
+ BUILD_TARGET: electron/spec:chromium_browsertests
+ TESTS_CONFIG: src/electron/spec/configs/browsertests.yml
+
+env-unittests: &env-unittests
+ GN_CONFIG: //electron/build/args/native_tests.gn
+ BUILD_TARGET: electron/spec:chromium_unittests
+ TESTS_CONFIG: src/electron/spec/configs/unittests.yml
+
+# Build targets options.
+env-ia32: &env-ia32
+ GN_EXTRA_ARGS: 'target_cpu = "x86"'
+ NPM_CONFIG_ARCH: ia32
+ TARGET_ARCH: ia32
+
+env-arm: &env-arm
+ GN_EXTRA_ARGS: 'target_cpu = "arm"'
+ MKSNAPSHOT_TOOLCHAIN: //build/toolchain/linux:clang_arm
+ BUILD_NATIVE_MKSNAPSHOT: 1
+ TARGET_ARCH: arm
+
+env-apple-silicon: &env-apple-silicon
+ GN_EXTRA_ARGS: 'target_cpu = "arm64" use_prebuilt_v8_context_snapshot = true'
+ TARGET_ARCH: arm64
+ USE_PREBUILT_V8_CONTEXT_SNAPSHOT: 1
+ npm_config_arch: arm64
+
+env-arm64: &env-arm64
+ GN_EXTRA_ARGS: 'target_cpu = "arm64" fatal_linker_warnings = false enable_linux_installer = false'
+ MKSNAPSHOT_TOOLCHAIN: //build/toolchain/linux:clang_arm64
+ BUILD_NATIVE_MKSNAPSHOT: 1
+ TARGET_ARCH: arm64
+
+env-mas: &env-mas
+ GN_EXTRA_ARGS: 'is_mas_build = true'
+ MAS_BUILD: 'true'
+
+env-mas-apple-silicon: &env-mas-apple-silicon
+ GN_EXTRA_ARGS: 'target_cpu = "arm64" is_mas_build = true use_prebuilt_v8_context_snapshot = true'
+ MAS_BUILD: 'true'
+ TARGET_ARCH: arm64
+ USE_PREBUILT_V8_CONTEXT_SNAPSHOT: 1
+
+env-send-slack-notifications: &env-send-slack-notifications
+ NOTIFY_SLACK: true
+
+env-global: &env-global
+ ELECTRON_OUT_DIR: Default
+
+env-linux-medium: &env-linux-medium
+ <<: *env-global
+ NUMBER_OF_NINJA_PROCESSES: 3
+
+env-linux-2xlarge: &env-linux-2xlarge
+ <<: *env-global
+ NUMBER_OF_NINJA_PROCESSES: 34
+
+env-linux-2xlarge-release: &env-linux-2xlarge-release
+ <<: *env-global
+ NUMBER_OF_NINJA_PROCESSES: 16
+
+env-machine-mac: &env-machine-mac
+ <<: *env-global
+ NUMBER_OF_NINJA_PROCESSES: 6
+
+env-mac-large: &env-mac-large
+ <<: *env-global
+ NUMBER_OF_NINJA_PROCESSES: 18
+
+env-mac-large-release: &env-mac-large-release
+ <<: *env-global
+ NUMBER_OF_NINJA_PROCESSES: 8
+
+env-ninja-status: &env-ninja-status
+ NINJA_STATUS: "[%r processes, %f/%t @ %o/s : %es] "
+
+env-disable-run-as-node: &env-disable-run-as-node
+ GN_BUILDFLAG_ARGS: 'enable_run_as_node = false'
+
+env-32bit-release: &env-32bit-release
+ # Set symbol level to 1 for 32 bit releases because of https://crbug.com/648948
+ GN_BUILDFLAG_ARGS: 'symbol_level = 1'
+
+env-macos-build: &env-macos-build
+ # Disable pre-compiled headers to reduce out size, only useful for rebuilds
+ GN_BUILDFLAG_ARGS: 'enable_precompiled_headers = false'
+
+# Individual (shared) steps.
+step-maybe-notify-slack-failure: &step-maybe-notify-slack-failure
+ run:
+ name: Send a Slack notification on failure
+ command: |
+ if [ "$NOTIFY_SLACK" == "true" ]; then
+ export MESSAGE="Build failed for *<$CIRCLE_BUILD_URL|$CIRCLE_JOB>* nightly build from *$CIRCLE_BRANCH*."
+ curl -g -H "Content-Type: application/json" -X POST \
+ -d "{\"text\": \"$MESSAGE\", \"attachments\": [{\"color\": \"#FC5C3C\",\"title\": \"$CIRCLE_JOB nightly build results\",\"title_link\": \"$CIRCLE_BUILD_URL\"}]}" $SLACK_WEBHOOK
+ fi
+ when: on_fail
+
+step-maybe-notify-slack-success: &step-maybe-notify-slack-success
+ run:
+ name: Send a Slack notification on success
+ command: |
+ if [ "$NOTIFY_SLACK" == "true" ]; then
+ export MESSAGE="Build succeeded for *<$CIRCLE_BUILD_URL|$CIRCLE_JOB>* nightly build from *$CIRCLE_BRANCH*."
+ curl -g -H "Content-Type: application/json" -X POST \
+ -d "{\"text\": \"$MESSAGE\", \"attachments\": [{\"color\": \"good\",\"title\": \"$CIRCLE_JOB nightly build results\",\"title_link\": \"$CIRCLE_BUILD_URL\"}]}" $SLACK_WEBHOOK
+ fi
+ when: on_success
+
+step-maybe-cleanup-arm64-mac: &step-maybe-cleanup-arm64-mac
+ run:
+ name: Cleanup after testing
+ command: |
+ if [ "$TARGET_ARCH" == "arm64" ] &&[ "`uname`" == "Darwin" ]; then
+ killall Electron || echo "No Electron processes left running"
+ killall Safari || echo "No Safari processes left running"
+ rm -rf ~/Library/Application\ Support/Electron*
+ rm -rf ~/Library/Application\ Support/electron*
+ fi
+ when: always
+
+step-checkout-electron: &step-checkout-electron
+ checkout:
+ path: src/electron
+
+step-depot-tools-get: &step-depot-tools-get
+ run:
+ name: Get depot tools
+ command: |
+ git clone --depth=1 https://chromium.googlesource.com/chromium/tools/depot_tools.git
+
+step-depot-tools-add-to-path: &step-depot-tools-add-to-path
+ run:
+ name: Add depot tools to PATH
+ command: echo 'export PATH="$PATH:'"$PWD"'/depot_tools"' >> $BASH_ENV
+
+step-gclient-sync: &step-gclient-sync
+ run:
+ name: Gclient sync
+ command: |
+ # If we did not restore a complete sync then we need to sync for realz
+ if [ ! -s "src/electron/.circle-sync-done" ]; then
+ gclient config \
+ --name "src/electron" \
+ --unmanaged \
+ $GCLIENT_EXTRA_ARGS \
+ "$CIRCLE_REPOSITORY_URL"
+
+ ELECTRON_USE_THREE_WAY_MERGE_FOR_PATCHES=1 gclient sync --with_branch_heads --with_tags
+ if [ "$IS_RELEASE" != "true" ]; then
+ # Re-export all the patches to check if there were changes.
+ python src/electron/script/export_all_patches.py src/electron/patches/config.json
+ cd src/electron
+ git update-index --refresh || true
+ if ! git diff-index --quiet HEAD --; then
+ # There are changes to the patches. Make a git commit with the updated patches
+ git add patches
+ GIT_COMMITTER_NAME="Electron Bot" GIT_COMMITTER_EMAIL="electron@github.com" git commit -m "update patches" --author="Electron Bot <electron@github.com>"
+ # Export it
+ mkdir -p ../../patches
+ git format-patch -1 --stdout --keep-subject --no-stat --full-index > ../../patches/update-patches.patch
+ if (node ./script/push-patch.js 2> /dev/null > /dev/null); then
+ echo
+ echo "======================================================================"
+ echo "Changes to the patches when applying, we have auto-pushed the diff to the current branch"
+ echo "A new CI job will kick off shortly"
+ echo "======================================================================"
+ exit 1
+ else
+ echo
+ echo "======================================================================"
+ echo "There were changes to the patches when applying."
+ echo "Check the CI artifacts for a patch you can apply to fix it."
+ echo "======================================================================"
+ exit 1
+ fi
+ fi
+ fi
+ fi
+
+step-setup-env-for-build: &step-setup-env-for-build
+ run:
+ name: Setup Environment Variables
+ command: |
+ # To find `gn` executable.
+ echo 'export CHROMIUM_BUILDTOOLS_PATH="'"$PWD"'/src/buildtools"' >> $BASH_ENV
+
+step-setup-goma-for-build: &step-setup-goma-for-build
+ run:
+ name: Setup Goma
+ command: |
+ echo 'export NUMBER_OF_NINJA_PROCESSES=300' >> $BASH_ENV
+ if [ "`uname`" == "Darwin" ]; then
+ echo 'ulimit -n 10000' >> $BASH_ENV
+ echo 'sudo launchctl limit maxfiles 65536 200000' >> $BASH_ENV
+ fi
+ if [ ! -z "$RAW_GOMA_AUTH" ]; then
+ echo $RAW_GOMA_AUTH > ~/.goma_oauth2_config
+ fi
+ git clone https://github.com/electron/build-tools.git
+ cd build-tools
+ npm install
+ mkdir third_party
+ node -e "require('./src/utils/goma.js').downloadAndPrepare({ gomaOneForAll: true })"
+ third_party/goma/goma_ctl.py ensure_start
+ echo 'export GN_GOMA_FILE='`node -e "console.log(require('./src/utils/goma.js').gnFilePath)"` >> $BASH_ENV
+ echo 'export LOCAL_GOMA_DIR='`node -e "console.log(require('./src/utils/goma.js').dir)"` >> $BASH_ENV
+ echo 'export GOMA_FALLBACK_ON_AUTH_FAILURE=true' >> $BASH_ENV
+ cd ..
+
+step-restore-brew-cache: &step-restore-brew-cache
+ restore_cache:
+ paths:
+ - /usr/local/Cellar/gnu-tar
+ - /usr/local/bin/gtar
+ keys:
+ - v4-brew-cache-{{ arch }}
+
+step-save-brew-cache: &step-save-brew-cache
+ save_cache:
+ paths:
+ - /usr/local/Cellar/gnu-tar
+ - /usr/local/bin/gtar
+ key: v4-brew-cache-{{ arch }}
+ name: Persisting brew cache
+
+step-get-more-space-on-mac: &step-get-more-space-on-mac
+ run:
+ name: Free up space on MacOS
+ command: |
+ if [ "`uname`" == "Darwin" ]; then
+ sudo mkdir -p $TMPDIR/del-target
+ if [ "$TARGET_ARCH" == "arm64" ]; then
+ # Remount the root volume as writable, don't ask questions plz
+ sudo mount -uw /
+ fi
+ tmpify() {
+ if [ -d "$1" ]; then
+ sudo mv "$1" $TMPDIR/del-target/$(echo $1|shasum -a 256|head -n1|cut -d " " -f1)
+ fi
+ }
+
+ strip_arm_deep() {
+ opwd=$(pwd)
+ cd $1
+ f=$(find . -perm +111 -type f)
+ for fp in $f
+ do
+ if [[ $(file "$fp") == *"universal binary"* ]]; then
+ if [[ $(file "$fp") == *"arm64e)"* ]]; then
+ sudo lipo -remove arm64e "$fp" -o "$fp" || true
+ fi
+ if [[ $(file "$fp") == *"arm64)"* ]]; then
+ sudo lipo -remove arm64 "$fp" -o "$fp" || true
+ fi
+ fi
+ done
+
+ cd $opwd
+ }
+
+ tmpify /Library/Developer/CoreSimulator
+ tmpify ~/Library/Developer/CoreSimulator
+ tmpify $(xcode-select -p)/Platforms/AppleTVOS.platform
+ tmpify $(xcode-select -p)/Platforms/iPhoneOS.platform
+ tmpify $(xcode-select -p)/Platforms/WatchOS.platform
+ tmpify $(xcode-select -p)/Platforms/WatchSimulator.platform
+ tmpify $(xcode-select -p)/Platforms/AppleTVSimulator.platform
+ tmpify $(xcode-select -p)/Platforms/iPhoneSimulator.platform
+ tmpify $(xcode-select -p)/Toolchains/XcodeDefault.xctoolchain/usr/metal/ios
+ tmpify $(xcode-select -p)/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift
+ tmpify $(xcode-select -p)/Toolchains/XcodeDefault.xctoolchain/usr/lib/swift-5.0
+ tmpify ~/.rubies
+ tmpify ~/Library/Caches/Homebrew
+ tmpify /usr/local/Homebrew
+ sudo rm -rf $TMPDIR/del-target
+
+ if [ "$TARGET_ARCH" == "arm64" ]; then
+ sudo rm -rf "/System/Library/Desktop Pictures"
+ sudo rm -rf /System/Library/Templates/Data
+ sudo rm -rf /System/Library/Speech/Voices
+ sudo rm -rf "/System/Library/Screen Savers"
+ sudo rm -rf /System/Volumes/Data/Library/Developer/CommandLineTools/SDKs
+ sudo rm -rf "/System/Volumes/Data/Library/Application Support/Apple/Photos/Print Products"
+ sudo rm -rf /System/Volumes/Data/Library/Java
+ sudo rm -rf /System/Volumes/Data/Library/Ruby
+ sudo rm -rf /System/Volumes/Data/Library/Printers
+ sudo rm -rf /System/iOSSupport
+ sudo rm -rf /System/Applications/*.app
+ sudo rm -rf /System/Applications/Utilities/*.app
+ sudo rm -rf /System/Library/LinguisticData
+ sudo rm -rf /System/Volumes/Data/private/var/db/dyld/*
+ # sudo rm -rf /System/Library/Fonts/*
+ # sudo rm -rf /System/Library/PreferencePanes
+ sudo rm -rf /System/Library/AssetsV2/*
+ sudo rm -rf /Applications/Safari.app
+ sudo rm -rf ~/project/src/build/linux
+ sudo rm -rf ~/project/src/third_party/catapult/tracing/test_data
+ sudo rm -rf ~/project/src/third_party/angle/third_party/VK-GL-CTS
+
+ # lipo off some huge binaries arm64 versions to save space
+ strip_arm_deep $(xcode-select -p)/../SharedFrameworks
+ strip_arm_deep /System/Volumes/Data/Library/Developer/CommandLineTools/usr
+ fi
+ fi
+ background: true
+
+# On macOS delete all .git directories under src/ except for
+# third_party/angle/ because of build time generation of file
+# gen/angle/commit.h depends on third_party/angle/.git/HEAD
+# https://chromium-review.googlesource.com/c/angle/angle/+/2074924
+# TODO: maybe better to always leave out */.git/HEAD file for all targets ?
+step-delete-git-directories: &step-delete-git-directories
+ run:
+ name: Delete all .git directories under src on MacOS to free space
+ command: |
+ if [ "`uname`" == "Darwin" ]; then
+ cd src
+ ( find . -type d -name ".git" -not -path "./third_party/angle/*" ) | xargs rm -rf
+ fi
+
+# On macOS the yarn install command during gclient sync was run on a linux
+# machine and therefore installed a slightly different set of dependencies
+# Notably "fsevents" is a macOS only dependency, we rerun yarn install once
+# we are on a macOS machine to get the correct state
+step-install-npm-deps-on-mac: &step-install-npm-deps-on-mac
+ run:
+ name: Install node_modules on MacOS
+ command: |
+ if [ "`uname`" == "Darwin" ]; then
+ cd src/electron
+ node script/yarn install
+ fi
+
+# This step handles the differences between the linux "gclient sync"
+# and the expected state on macOS
+step-fix-sync-on-mac: &step-fix-sync-on-mac
+ run:
+ name: Fix Sync on macOS
+ command: |
+ if [ "`uname`" == "Darwin" ]; then
+ # Fix Clang Install (wrong binary)
+ rm -rf src/third_party/llvm-build
+ python src/tools/clang/scripts/update.py
+ fi
+
+step-install-signing-cert-on-mac: &step-install-signing-cert-on-mac
+ run:
+ name: Import and trust self-signed codesigning cert on MacOS
+ command: |
+ if [ "$TARGET_ARCH" != "arm64" ] && [ "`uname`" == "Darwin" ]; then
+ cd src/electron
+ ./script/codesign/generate-identity.sh
+ fi
+
+step-install-gnutar-on-mac: &step-install-gnutar-on-mac
+ run:
+ name: Install gnu-tar on macos
+ command: |
+ if [ "`uname`" == "Darwin" ]; then
+ if [ ! -d /usr/local/Cellar/gnu-tar/ ]; then
+ brew update
+ brew install gnu-tar
+ fi
+ ln -fs /usr/local/bin/gtar /usr/local/bin/tar
+ fi
+
+step-gn-gen-default: &step-gn-gen-default
+ run:
+ name: Default GN gen
+ command: |
+ cd src
+ gn gen out/Default --args="import(\"$GN_CONFIG\") import(\"$GN_GOMA_FILE\") $GN_EXTRA_ARGS $GN_BUILDFLAG_ARGS"
+
+step-gn-check: &step-gn-check
+ run:
+ name: GN check
+ command: |
+ cd src
+ gn check out/Default //electron:electron_lib
+ gn check out/Default //electron:electron_app
+ gn check out/Default //electron/shell/common/api:mojo
+ # Check the hunspell filenames
+ node electron/script/gen-hunspell-filenames.js --check
+
+step-electron-build: &step-electron-build
+ run:
+ name: Electron build
+ no_output_timeout: 30m
+ command: |
+ # On arm platforms we generate a cross-arch ffmpeg that ninja does not seem
+ # to realize is not correct / should be rebuilt. We delete it here so it is
+ # rebuilt
+ if [ "$TRIGGER_ARM_TEST" == "true" ]; then
+ rm -f src/out/Default/libffmpeg.so
+ fi
+ cd src
+ # Enable if things get really bad
+ # if [ "$TARGET_ARCH" == "arm64" ] &&[ "`uname`" == "Darwin" ]; then
+ # diskutil erasevolume HFS+ "xcode_disk" `hdiutil attach -nomount ram://12582912`
+ # mv /Applications/Xcode-12.beta.5.app /Volumes/xcode_disk/
+ # ln -s /Volumes/xcode_disk/Xcode-12.beta.5.app /Applications/Xcode-12.beta.5.app
+ # fi
+
+ # Lets generate a snapshot and mksnapshot and then delete all the x-compiled generated files to save space
+ if [ "$USE_PREBUILT_V8_CONTEXT_SNAPSHOT" == "1" ]; then
+ ninja -C out/Default electron:electron_mksnapshot_zip -j $NUMBER_OF_NINJA_PROCESSES
+ ninja -C out/Default tools/v8_context_snapshot -j $NUMBER_OF_NINJA_PROCESSES
+ gn desc out/Default v8:run_mksnapshot_default args > out/Default/mksnapshot_args
+ rm -rf out/Default/clang_x64_v8_arm64/obj
+
+ # Regenerate because we just deleted some ninja files
+ gn gen out/Default --args="import(\"$GN_CONFIG\") import(\"$GN_GOMA_FILE\") $GN_EXTRA_ARGS $GN_BUILDFLAG_ARGS"
+ fi
+ ninja -C out/Default electron -j $NUMBER_OF_NINJA_PROCESSES
+ node electron/script/check-symlinks.js
+
+step-native-unittests-build: &step-native-unittests-build
+ run:
+ name: Build native test targets
+ no_output_timeout: 30m
+ command: |
+ cd src
+ ninja -C out/Default shell_browser_ui_unittests -j $NUMBER_OF_NINJA_PROCESSES
+
+step-maybe-electron-dist-strip: &step-maybe-electron-dist-strip
+ run:
+ name: Strip electron binaries
+ command: |
+ if [ "$STRIP_BINARIES" == "true" ] && [ "`uname`" == "Linux" ]; then
+ if [ x"$TARGET_ARCH" == x ]; then
+ target_cpu=x64
+ elif [ "$TARGET_ARCH" == "ia32" ]; then
+ target_cpu=x86
+ else
+ target_cpu="$TARGET_ARCH"
+ fi
+ cd src
+ electron/script/copy-debug-symbols.py --target-cpu="$target_cpu" --out-dir=out/Default/debug --compress
+ electron/script/strip-binaries.py --target-cpu="$target_cpu"
+ electron/script/add-debug-link.py --target-cpu="$target_cpu" --debug-dir=out/Default/debug
+ fi
+
+step-electron-dist-build: &step-electron-dist-build
+ run:
+ name: Build dist.zip
+ command: |
+ cd src
+ if [ "$SKIP_DIST_ZIP" != "1" ]; then
+ ninja -C out/Default electron:electron_dist_zip
+ if [ "$CHECK_DIST_MANIFEST" == "1" ]; then
+ if [ "`uname`" == "Darwin" ]; then
+ target_os=mac
+ target_cpu=x64
+ if [ x"$MAS_BUILD" == x"true" ]; then
+ target_os=mac_mas
+ fi
+ if [ "$TARGET_ARCH" == "arm64" ]; then
+ target_cpu=arm64
+ fi
+ elif [ "`uname`" == "Linux" ]; then
+ target_os=linux
+ if [ x"$TARGET_ARCH" == x ]; then
+ target_cpu=x64
+ elif [ "$TARGET_ARCH" == "ia32" ]; then
+ target_cpu=x86
+ else
+ target_cpu="$TARGET_ARCH"
+ fi
+ else
+ echo "Unknown system: `uname`"
+ exit 1
+ fi
+ electron/script/zip_manifests/check-zip-manifest.py out/Default/dist.zip electron/script/zip_manifests/dist_zip.$target_os.$target_cpu.manifest
+ fi
+ fi
+
+step-electron-dist-store: &step-electron-dist-store
+ store_artifacts:
+ path: src/out/Default/dist.zip
+ destination: dist.zip
+
+step-electron-maybe-chromedriver-gn-gen: &step-electron-maybe-chromedriver-gn-gen
+ run:
+ name: chromedriver GN gen
+ command: |
+ cd src
+ if [ "$TARGET_ARCH" == "arm" ] || [ "$TARGET_ARCH" == "arm64" ]; then
+ gn gen out/chromedriver --args="import(\"$GN_CONFIG\") import(\"$GN_GOMA_FILE\") is_component_ffmpeg=false proprietary_codecs=false $GN_EXTRA_ARGS $GN_BUILDFLAG_ARGS"
+ fi
+
+step-electron-chromedriver-build: &step-electron-chromedriver-build
+ run:
+ name: Build chromedriver.zip
+ command: |
+ cd src
+ if [ "$TARGET_ARCH" == "arm" ] || [ "$TARGET_ARCH" == "arm64" ]; then
+ export CHROMEDRIVER_DIR="out/chromedriver"
+ else
+ export CHROMEDRIVER_DIR="out/Default"
+ fi
+ ninja -C $CHROMEDRIVER_DIR electron:electron_chromedriver -j $NUMBER_OF_NINJA_PROCESSES
+ if [ "`uname`" == "Linux" ]; then
+ electron/script/strip-binaries.py --target-cpu="$TARGET_ARCH" --file $PWD/$CHROMEDRIVER_DIR/chromedriver
+ fi
+ ninja -C $CHROMEDRIVER_DIR electron:electron_chromedriver_zip
+ if [ "$TARGET_ARCH" == "arm" ] || [ "$TARGET_ARCH" == "arm64" ]; then
+ cp out/chromedriver/chromedriver.zip out/Default
+ fi
+
+step-electron-chromedriver-store: &step-electron-chromedriver-store
+ store_artifacts:
+ path: src/out/Default/chromedriver.zip
+ destination: chromedriver.zip
+
+step-nodejs-headers-build: &step-nodejs-headers-build
+ run:
+ name: Build Node.js headers
+ command: |
+ cd src
+ ninja -C out/Default third_party/electron_node:headers
+
+step-nodejs-headers-store: &step-nodejs-headers-store
+ store_artifacts:
+ path: src/out/Default/gen/node_headers.tar.gz
+ destination: node_headers.tar.gz
+
+step-native-unittests-store: &step-native-unittests-store
+ store_artifacts:
+ path: src/out/Default/shell_browser_ui_unittests
+ destination: shell_browser_ui_unittests
+
+step-electron-publish: &step-electron-publish
+ run:
+ name: Publish Electron Dist
+ command: |
+ if [ "`uname`" == "Darwin" ]; then
+ rm -rf src/out/Default/obj
+ fi
+
+ cd src/electron
+ if [ "$UPLOAD_TO_S3" == "1" ]; then
+ echo 'Uploading Electron release distribution to S3'
+ script/release/uploaders/upload.py --verbose --upload_to_s3
+ else
+ echo 'Uploading Electron release distribution to Github releases'
+ script/release/uploaders/upload.py --verbose
+ fi
+
+step-persist-data-for-tests: &step-persist-data-for-tests
+ persist_to_workspace:
+ root: .
+ paths:
+ # Build artifacts
+ - src/out/Default/dist.zip
+ - src/out/Default/mksnapshot.zip
+ - src/out/Default/chromedriver.zip
+ - src/out/Default/shell_browser_ui_unittests
+ - src/out/Default/gen/node_headers
+ - src/out/ffmpeg/ffmpeg.zip
+ - src/electron
+ - src/third_party/electron_node
+ - src/third_party/nan
+ - src/cross-arch-snapshots
+
+step-electron-dist-unzip: &step-electron-dist-unzip
+ run:
+ name: Unzip dist.zip
+ command: |
+ cd src/out/Default
+ # -o overwrite files WITHOUT prompting
+ # TODO(alexeykuzmin): Remove '-o' when it's no longer needed.
+ # -: allows to extract archive members into locations outside
+ # of the current ``extraction root folder''.
+ # ASan builds have the llvm-symbolizer binaries listed as
+ # runtime_deps, with their paths as `../../third_party/...`
+ # unzip exits with non-zero code on such zip files unless -: is
+ # passed.
+ unzip -:o dist.zip
+
+step-ffmpeg-unzip: &step-ffmpeg-unzip
+ run:
+ name: Unzip ffmpeg.zip
+ command: |
+ cd src/out/ffmpeg
+ unzip -:o ffmpeg.zip
+
+step-mksnapshot-unzip: &step-mksnapshot-unzip
+ run:
+ name: Unzip mksnapshot.zip
+ command: |
+ cd src/out/Default
+ unzip -:o mksnapshot.zip
+
+step-chromedriver-unzip: &step-chromedriver-unzip
+ run:
+ name: Unzip chromedriver.zip
+ command: |
+ cd src/out/Default
+ unzip -:o chromedriver.zip
+
+step-ffmpeg-gn-gen: &step-ffmpeg-gn-gen
+ run:
+ name: ffmpeg GN gen
+ command: |
+ cd src
+ gn gen out/ffmpeg --args="import(\"//electron/build/args/ffmpeg.gn\") import(\"$GN_GOMA_FILE\") $GN_EXTRA_ARGS"
+
+step-ffmpeg-build: &step-ffmpeg-build
+ run:
+ name: Non proprietary ffmpeg build
+ command: |
+ cd src
+ ninja -C out/ffmpeg electron:electron_ffmpeg_zip -j $NUMBER_OF_NINJA_PROCESSES
+
+step-verify-ffmpeg: &step-verify-ffmpeg
+ run:
+ name: Verify ffmpeg
+ command: |
+ cd src
+ python electron/script/verify-ffmpeg.py --source-root "$PWD" --build-dir out/Default --ffmpeg-path out/ffmpeg
+
+step-ffmpeg-store: &step-ffmpeg-store
+ store_artifacts:
+ path: src/out/ffmpeg/ffmpeg.zip
+ destination: ffmpeg.zip
+
+step-verify-mksnapshot: &step-verify-mksnapshot
+ run:
+ name: Verify mksnapshot
+ command: |
+ if [ "$IS_ASAN" != "1" ]; then
+ cd src
+ if [ "$TARGET_ARCH" == "arm64" ] &&[ "`uname`" == "Darwin" ]; then
+ python electron/script/verify-mksnapshot.py --source-root "$PWD" --build-dir out/Default --snapshot-files-dir $PWD/cross-arch-snapshots
+ else
+ python electron/script/verify-mksnapshot.py --source-root "$PWD" --build-dir out/Default
+ fi
+ fi
+
+step-verify-chromedriver: &step-verify-chromedriver
+ run:
+ name: Verify ChromeDriver
+ command: |
+ if [ "$IS_ASAN" != "1" ]; then
+ cd src
+ python electron/script/verify-chromedriver.py --source-root "$PWD" --build-dir out/Default
+ fi
+
+step-setup-linux-for-headless-testing: &step-setup-linux-for-headless-testing
+ run:
+ name: Setup for headless testing
+ command: |
+ if [ "`uname`" != "Darwin" ]; then
+ sh -e /etc/init.d/xvfb start
+ fi
+
+step-show-goma-stats: &step-show-goma-stats
+ run:
+ shell: /bin/bash
+ name: Check goma stats after build
+ command: |
+ set +e
+ set +o pipefail
+ $LOCAL_GOMA_DIR/goma_ctl.py stat
+ $LOCAL_GOMA_DIR/diagnose_goma_log.py
+ true
+ when: always
+
+step-mksnapshot-build: &step-mksnapshot-build
+ run:
+ name: mksnapshot build
+ command: |
+ cd src
+ if [ "$USE_PREBUILT_V8_CONTEXT_SNAPSHOT" != "1" ]; then
+ ninja -C out/Default electron:electron_mksnapshot -j $NUMBER_OF_NINJA_PROCESSES
+ gn desc out/Default v8:run_mksnapshot_default args > out/Default/mksnapshot_args
+ fi
+ if [ "`uname`" != "Darwin" ]; then
+ if [ "$TARGET_ARCH" == "arm" ]; then
+ electron/script/strip-binaries.py --file $PWD/out/Default/clang_x86_v8_arm/mksnapshot
+ electron/script/strip-binaries.py --file $PWD/out/Default/clang_x86_v8_arm/v8_context_snapshot_generator
+ elif [ "$TARGET_ARCH" == "arm64" ]; then
+ electron/script/strip-binaries.py --file $PWD/out/Default/clang_x64_v8_arm64/mksnapshot
+ electron/script/strip-binaries.py --file $PWD/out/Default/clang_x64_v8_arm64/v8_context_snapshot_generator
+ else
+ electron/script/strip-binaries.py --file $PWD/out/Default/mksnapshot
+ electron/script/strip-binaries.py --file $PWD/out/Default/v8_context_snapshot_generator
+ fi
+ fi
+ if [ "$USE_PREBUILT_V8_CONTEXT_SNAPSHOT" != "1" ] && [ "$SKIP_DIST_ZIP" != "1" ]; then
+ ninja -C out/Default electron:electron_mksnapshot_zip -j $NUMBER_OF_NINJA_PROCESSES
+ (cd out/Default; zip mksnapshot.zip mksnapshot_args gen/v8/embedded.S)
+ fi
+
+step-mksnapshot-store: &step-mksnapshot-store
+ store_artifacts:
+ path: src/out/Default/mksnapshot.zip
+ destination: mksnapshot.zip
+
+step-hunspell-build: &step-hunspell-build
+ run:
+ name: hunspell build
+ command: |
+ cd src
+ if [ "$SKIP_DIST_ZIP" != "1" ]; then
+ ninja -C out/Default electron:hunspell_dictionaries_zip -j $NUMBER_OF_NINJA_PROCESSES
+ fi
+
+step-hunspell-store: &step-hunspell-store
+ store_artifacts:
+ path: src/out/Default/hunspell_dictionaries.zip
+ destination: hunspell_dictionaries.zip
+
+step-maybe-generate-breakpad-symbols: &step-maybe-generate-breakpad-symbols
+ run:
+ name: Generate breakpad symbols
+ no_output_timeout: 30m
+ command: |
+ if [ "$GENERATE_SYMBOLS" == "true" ]; then
+ cd src
+ ninja -C out/Default electron:electron_symbols
+ fi
+
+step-maybe-zip-symbols: &step-maybe-zip-symbols
+ run:
+ name: Zip symbols
+ command: |
+ cd src
+ export BUILD_PATH="$PWD/out/Default"
+ ninja -C out/Default electron:licenses
+ ninja -C out/Default electron:electron_version
+ electron/script/zip-symbols.py -b $BUILD_PATH
+
+step-symbols-store: &step-symbols-store
+ store_artifacts:
+ path: src/out/Default/symbols.zip
+ destination: symbols.zip
+
+step-maybe-cross-arch-snapshot: &step-maybe-cross-arch-snapshot
+ run:
+      name: Generate cross-arch snapshot (arm/arm64)
+ command: |
+ if [ "$GENERATE_CROSS_ARCH_SNAPSHOT" == "true" ] && [ -z "$CIRCLE_PR_NUMBER" ]; then
+ cd src
+ if [ "$TARGET_ARCH" == "arm" ]; then
+ export MKSNAPSHOT_PATH="clang_x86_v8_arm"
+ elif [ "$TARGET_ARCH" == "arm64" ]; then
+ export MKSNAPSHOT_PATH="clang_x64_v8_arm64"
+ fi
+ cp "out/Default/$MKSNAPSHOT_PATH/mksnapshot" out/Default
+ cp "out/Default/$MKSNAPSHOT_PATH/v8_context_snapshot_generator" out/Default
+ if [ "`uname`" == "Linux" ]; then
+ cp "out/Default/$MKSNAPSHOT_PATH/libffmpeg.so" out/Default
+ elif [ "`uname`" == "Darwin" ]; then
+ cp "out/Default/$MKSNAPSHOT_PATH/libffmpeg.dylib" out/Default
+ fi
+ python electron/script/verify-mksnapshot.py --source-root "$PWD" --build-dir out/Default --create-snapshot-only
+ mkdir cross-arch-snapshots
+ cp out/Default-mksnapshot-test/*.bin cross-arch-snapshots
+ fi
+
+step-maybe-cross-arch-snapshot-store: &step-maybe-cross-arch-snapshot-store
+ store_artifacts:
+ path: src/cross-arch-snapshots
+ destination: cross-arch-snapshots
+
+step-maybe-trigger-arm-test: &step-maybe-trigger-arm-test
+ run:
+ name: Trigger an arm test on VSTS if applicable
+ command: |
+ cd src
+      # Only run for non-fork PRs
+      if [ "$TRIGGER_ARM_TEST" == "true" ] && [ -z "$CIRCLE_PR_NUMBER" ]; then
+        # Trigger the VSTS job, passing along the CircleCI job number and branch to build
+ if [ "`uname`" == "Darwin" ]; then
+ if [ x"$MAS_BUILD" == x"true" ]; then
+ export DEVOPS_BUILD="electron-mas-arm64-testing"
+ else
+ export DEVOPS_BUILD="electron-osx-arm64-testing"
+ fi
+ echo "Triggering $DEVOPS_BUILD build on Azure DevOps"
+ node electron/script/release/ci-release-build.js --job=$DEVOPS_BUILD --ci=DevOps --armTest --circleBuildNum=$CIRCLE_BUILD_NUM $CIRCLE_BRANCH
+ else
+ echo "Triggering electron-$TARGET_ARCH-testing build on VSTS"
+ node electron/script/release/ci-release-build.js --job=electron-$TARGET_ARCH-testing --ci=VSTS --armTest --circleBuildNum=$CIRCLE_BUILD_NUM $CIRCLE_BRANCH
+ fi
+ fi
+
+step-maybe-generate-typescript-defs: &step-maybe-generate-typescript-defs
+ run:
+ name: Generate type declarations
+ command: |
+ if [ "`uname`" == "Darwin" ]; then
+ cd src/electron
+ node script/yarn create-typescript-definitions
+ fi
+
+step-fix-known-hosts-linux: &step-fix-known-hosts-linux
+ run:
+ name: Fix Known Hosts on Linux
+ command: |
+ if [ "`uname`" == "Linux" ]; then
+ ./src/electron/.circleci/fix-known-hosts.sh
+ fi
+
+step-ninja-summary: &step-ninja-summary
+ run:
+ name: Print ninja summary
+ command: |
+ set +e
+ set +o pipefail
+ python depot_tools/post_build_ninja_summary.py -C src/out/Default || echo Ninja Summary Failed
+
+step-ninja-report: &step-ninja-report
+ store_artifacts:
+ path: src/out/Default/.ninja_log
+ destination: ninja_log
+
+# Checkout Steps
+step-generate-deps-hash: &step-generate-deps-hash
+ run:
+ name: Generate DEPS Hash
+ command: node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
+
+step-touch-sync-done: &step-touch-sync-done
+ run:
+ name: Touch Sync Done
+ command: touch src/electron/.circle-sync-done
+
+# Restore exact src cache based on the hash of DEPS and patches/*
+# If no cache is matched EXACTLY then the .circle-sync-done file is empty
+# If a cache is matched EXACTLY then the .circle-sync-done file contains "DONE"
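+# (The marker ends up containing "DONE" because step-mark-sync-done runs before the
+# src cache is saved, so an exact restore also brings back the non-empty marker and
+# the gclient sync below is skipped.)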
+step-maybe-restore-src-cache: &step-maybe-restore-src-cache
+ restore_cache:
+ keys:
+ - v8-src-cache-{{ checksum "src/electron/.depshash" }}
+ name: Restoring src cache
+step-maybe-restore-src-cache-marker: &step-maybe-restore-src-cache-marker
+ restore_cache:
+ keys:
+ - v1-src-cache-marker-{{ checksum "src/electron/.depshash" }}
+ name: Restoring src cache marker
+
+# Restore exact or closest git cache based on the hash of DEPS and .circle-sync-done
+# If the src cache was restored above then this will match an empty cache
+# If the src cache was not restored above then this will match a close git cache
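+# CircleCI matches restore keys by prefix, so the first key below requires an exact
+# DEPS checksum match while the second falls back to the most recent cache saved for
+# the same .circle-sync-done state.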
+step-maybe-restore-git-cache: &step-maybe-restore-git-cache
+ restore_cache:
+ paths:
+ - ~/.gclient-cache
+ keys:
+ - v2-gclient-cache-{{ checksum "src/electron/.circle-sync-done" }}-{{ checksum "src/electron/DEPS" }}
+ - v2-gclient-cache-{{ checksum "src/electron/.circle-sync-done" }}
+ name: Conditionally restoring git cache
+
+step-restore-out-cache: &step-restore-out-cache
+ restore_cache:
+ paths:
+ - ./src/out/Default
+ keys:
+ - v9-out-cache-{{ checksum "src/electron/.depshash" }}-{{ checksum "src/electron/.depshash-target" }}
+ name: Restoring out cache
+
+step-set-git-cache-path: &step-set-git-cache-path
+ run:
+    name: Set GIT_CACHE_PATH to make gclient use the cache
+ command: |
+ # CircleCI does not support interpolation when setting environment variables.
+ # https://circleci.com/docs/2.0/env-vars/#setting-an-environment-variable-in-a-shell-command
+ echo 'export GIT_CACHE_PATH="$HOME/.gclient-cache"' >> $BASH_ENV
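+      # The single quotes keep $HOME unexpanded here; it is resolved when later
+      # steps source $BASH_ENV.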
+
+# Persist the git cache based on the hash of DEPS and .circle-sync-done
+# If the src cache was restored above then this will persist an empty cache
+step-save-git-cache: &step-save-git-cache
+ save_cache:
+ paths:
+ - ~/.gclient-cache
+ key: v2-gclient-cache-{{ checksum "src/electron/.circle-sync-done" }}-{{ checksum "src/electron/DEPS" }}
+ name: Persisting git cache
+
+step-save-out-cache: &step-save-out-cache
+ save_cache:
+ paths:
+ - ./src/out/Default
+ key: v9-out-cache-{{ checksum "src/electron/.depshash" }}-{{ checksum "src/electron/.depshash-target" }}
+ name: Persisting out cache
+
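+# Run gclient hooks against an inline spec that contains only the src/electron solution
+# (unmanaged, with 'process_deps': False), so just Electron's own hooks are executed.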
+step-run-electron-only-hooks: &step-run-electron-only-hooks
+ run:
+ name: Run Electron Only Hooks
+ command: gclient runhooks --spec="solutions=[{'name':'src/electron','url':None,'deps_file':'DEPS','custom_vars':{'process_deps':False},'managed':False}]"
+
+step-generate-deps-hash-cleanly: &step-generate-deps-hash-cleanly
+ run:
+ name: Generate DEPS Hash
+ command: (cd src/electron && git checkout .) && node src/electron/script/generate-deps-hash.js && cat src/electron/.depshash-target
+
+# Mark the sync as done for future cache saving
+step-mark-sync-done: &step-mark-sync-done
+ run:
+ name: Mark Sync Done
+ command: echo DONE > src/electron/.circle-sync-done
+
+# Minimize the size of the cache
+step-minimize-workspace-size-from-checkout: &step-minimize-workspace-size-from-checkout
+ run:
+ name: Remove some unused data to avoid storing it in the workspace/cache
+ command: |
+ rm -rf src/android_webview
+ rm -rf src/ios/chrome
+ rm -rf src/third_party/blink/web_tests
+ rm -rf src/third_party/blink/perf_tests
+ rm -rf src/third_party/WebKit/LayoutTests
+ rm -rf third_party/electron_node/deps/openssl
+ rm -rf third_party/electron_node/deps/v8
+ rm -rf chrome/test/data/xr/webvr_info
+
+# Save the src cache based on the deps hash
+step-save-src-cache: &step-save-src-cache
+ save_cache:
+ paths:
+ - /var/portal
+ key: v8-src-cache-{{ checksum "/var/portal/src/electron/.depshash" }}
+ name: Persisting src cache
+step-make-src-cache-marker: &step-make-src-cache-marker
+ run:
+ name: Making src cache marker
+ command: touch .src-cache-marker
+step-save-src-cache-marker: &step-save-src-cache-marker
+ save_cache:
+ paths:
+ - .src-cache-marker
+ key: v1-src-cache-marker-{{ checksum "/var/portal/src/electron/.depshash" }}
+
+# Check for doc only change
+step-check-for-doc-only-change: &step-check-for-doc-only-change
+ run:
+ name: Check if commit is doc only change
+ command: |
+ cd src/electron
+ node script/yarn install --frozen-lockfile
+ if node script/doc-only-change.js --prNumber=$CIRCLE_PR_NUMBER --prURL=$CIRCLE_PULL_REQUEST --prBranch=$CIRCLE_BRANCH; then
+        # PR is a doc-only change; save the file with the value "true" to indicate it
+        echo "true" > .skip-ci-build
+      else
+        # PR is not a doc-only change; create an empty file to indicate the check has been done
+ touch .skip-ci-build
+ fi
+
+step-persist-doc-only-change: &step-persist-doc-only-change
+ persist_to_workspace:
+ root: .
+ paths:
+ - src/electron/.skip-ci-build
+
+step-maybe-early-exit-doc-only-change: &step-maybe-early-exit-doc-only-change
+ run:
+    name: Short-circuit build if doc-only change
+ command: |
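+      # -s is true only for a non-empty file, i.e. when .skip-ci-build contains "true".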
+ if [ -s src/electron/.skip-ci-build ]; then
+ circleci-agent step halt
+ fi
+
+step-maybe-early-exit-no-doc-change: &step-maybe-early-exit-no-doc-change
+ run:
+    name: Short-circuit job if change is not doc-only
+ command: |
+ if [ ! -s src/electron/.skip-ci-build ]; then
+ circleci-agent step halt
+ fi
+
+step-ts-compile: &step-ts-compile
+ run:
+    name: Run TS/JS compile on doc-only change
+ command: |
+ cd src
+ ninja -C out/Default electron:default_app_js -j $NUMBER_OF_NINJA_PROCESSES
+ ninja -C out/Default electron:electron_js2c -j $NUMBER_OF_NINJA_PROCESSES
+
+# Lists of steps.
+steps-lint: &steps-lint
+ steps:
+ - *step-checkout-electron
+ - run:
+ name: Setup third_party Depot Tools
+ command: |
+ # "depot_tools" has to be checkout into "//third_party/depot_tools" so pylint.py can a "pylintrc" file.
+ git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git src/third_party/depot_tools
+ echo 'export PATH="$PATH:'"$PWD"'/src/third_party/depot_tools"' >> $BASH_ENV
+ - run:
+ name: Download GN Binary
+ command: |
+ chromium_revision="$(grep -A1 chromium_version src/electron/DEPS | tr -d '\n' | cut -d\' -f4)"
+ gn_version="$(curl -sL "https://chromium.googlesource.com/chromium/src/+/${chromium_revision}/DEPS?format=TEXT" | base64 -d | grep gn_version | head -n1 | cut -d\' -f4)"
+
+ cipd ensure -ensure-file - -root . \<<-CIPD
+ \$ServiceURL https://chrome-infra-packages.appspot.com/
+ @Subdir src/buildtools/linux64
+ gn/gn/linux-amd64 $gn_version
+ CIPD
+
+ echo 'export CHROMIUM_BUILDTOOLS_PATH="'"$PWD"'/src/buildtools"' >> $BASH_ENV
+ - run:
+ name: Download clang-format Binary
+ command: |
+ chromium_revision="$(grep -A1 chromium_version src/electron/DEPS | tr -d '\n' | cut -d\' -f4)"
+
+ sha1_path='buildtools/linux64/clang-format.sha1'
+ curl -sL "https://chromium.googlesource.com/chromium/src/+/${chromium_revision}/${sha1_path}?format=TEXT" | base64 -d > "src/${sha1_path}"
+
+ download_from_google_storage.py --no_resume --no_auth --bucket chromium-clang-format -s "src/${sha1_path}"
+ - run:
+ name: Run Lint
+ command: |
+          # gn.py tries to find a gclient root folder starting from the current dir.
+          # When it fails and returns a "None" path, the whole script fails. Let's "fix" it.
+          touch .gclient
+          # Another option would be to check out "buildtools" inside the Electron checkout,
+          # but then we would lint its contents (at least with gn format), and they don't pass the lint.
+
+ cd src/electron
+ node script/yarn install --frozen-lockfile
+ node script/yarn lint
+ - run:
+ name: Run Script Typechecker
+ command: |
+ cd src/electron
+ node script/yarn tsc -p tsconfig.script.json
+
+steps-checkout-and-save-cache: &steps-checkout-and-save-cache
+ steps:
+ - *step-checkout-electron
+ - *step-check-for-doc-only-change
+ - *step-persist-doc-only-change
+ - *step-maybe-early-exit-doc-only-change
+ - *step-depot-tools-get
+ - *step-depot-tools-add-to-path
+ - *step-restore-brew-cache
+ - *step-get-more-space-on-mac
+ - *step-install-gnutar-on-mac
+
+ - *step-generate-deps-hash
+ - *step-touch-sync-done
+ - maybe-restore-portaled-src-cache:
+ halt-if-successful: true
+ - *step-maybe-restore-git-cache
+ - *step-set-git-cache-path
+ # This sync call only runs if .circle-sync-done is an EMPTY file
+ - *step-gclient-sync
+ - store_artifacts:
+ path: patches
+ - *step-save-git-cache
+ # These next few steps reset Electron to the correct commit regardless of which cache was restored
+ - run:
+ name: Wipe Electron
+ command: rm -rf src/electron
+ - *step-checkout-electron
+ - *step-run-electron-only-hooks
+ - *step-generate-deps-hash-cleanly
+ - *step-mark-sync-done
+ - *step-minimize-workspace-size-from-checkout
+ - *step-delete-git-directories
+ - run:
+ name: Move src folder to the cross-OS portal
+ command: |
+ sudo mkdir -p /var/portal
+ sudo chown -R $(id -u):$(id -g) /var/portal
+ mv ./src /var/portal
+ - *step-save-src-cache
+ - *step-make-src-cache-marker
+ - *step-save-src-cache-marker
+
+steps-electron-gn-check: &steps-electron-gn-check
+ steps:
+ - attach_workspace:
+ at: .
+ - *step-maybe-early-exit-doc-only-change
+ - *step-depot-tools-add-to-path
+ - *step-setup-env-for-build
+ - *step-setup-goma-for-build
+ - *step-gn-gen-default
+ - *step-gn-check
+
+steps-electron-ts-compile-for-doc-change: &steps-electron-ts-compile-for-doc-change
+ steps:
+    # Checkout - Copied from steps-checkout
+ - *step-checkout-electron
+ - *step-check-for-doc-only-change
+ - *step-maybe-early-exit-no-doc-change
+ - *step-depot-tools-get
+ - *step-depot-tools-add-to-path
+ - *step-restore-brew-cache
+ - *step-install-gnutar-on-mac
+ - *step-get-more-space-on-mac
+ - *step-generate-deps-hash
+ - *step-touch-sync-done
+ - maybe-restore-portaled-src-cache
+ - *step-maybe-restore-git-cache
+ - *step-set-git-cache-path
+ # This sync call only runs if .circle-sync-done is an EMPTY file
+ - *step-gclient-sync
+ # These next few steps reset Electron to the correct commit regardless of which cache was restored
+ - run:
+ name: Wipe Electron
+ command: rm -rf src/electron
+ - *step-checkout-electron
+ - *step-run-electron-only-hooks
+ - *step-generate-deps-hash-cleanly
+ - *step-mark-sync-done
+ - *step-minimize-workspace-size-from-checkout
+
+ - *step-depot-tools-add-to-path
+ - *step-setup-env-for-build
+ - *step-setup-goma-for-build
+ - *step-get-more-space-on-mac
+ - *step-install-npm-deps-on-mac
+ - *step-fix-sync-on-mac
+ - *step-gn-gen-default
+
+    # Compile TS/JS to verify the doc change didn't break anything
+ - *step-ts-compile
+
+steps-native-tests: &steps-native-tests
+ steps:
+ - attach_workspace:
+ at: .
+ - *step-depot-tools-add-to-path
+ - *step-setup-env-for-build
+ - *step-setup-goma-for-build
+ - *step-gn-gen-default
+
+ - run:
+ name: Build tests
+ command: |
+ cd src
+ ninja -C out/Default $BUILD_TARGET
+ - *step-show-goma-stats
+
+ - *step-setup-linux-for-headless-testing
+ - run:
+ name: Run tests
+ command: |
+ mkdir test_results
+ python src/electron/script/native-tests.py run \
+ --config $TESTS_CONFIG \
+ --tests-dir src/out/Default \
+ --output-dir test_results \
+ $TESTS_ARGS
+
+ - store_artifacts:
+ path: test_results
+ destination: test_results # Put it in the root folder.
+ - store_test_results:
+ path: test_results
+
+steps-verify-ffmpeg: &steps-verify-ffmpeg
+ steps:
+ - attach_workspace:
+ at: .
+ - *step-depot-tools-add-to-path
+ - *step-electron-dist-unzip
+ - *step-ffmpeg-unzip
+ - *step-setup-linux-for-headless-testing
+
+ - *step-verify-ffmpeg
+ - *step-maybe-notify-slack-failure
+
+steps-tests: &steps-tests
+ steps:
+ - attach_workspace:
+ at: .
+ - *step-maybe-early-exit-doc-only-change
+ - *step-depot-tools-add-to-path
+ - *step-electron-dist-unzip
+ - *step-mksnapshot-unzip
+ - *step-chromedriver-unzip
+ - *step-setup-linux-for-headless-testing
+ - *step-restore-brew-cache
+ - *step-fix-known-hosts-linux
+ - *step-install-signing-cert-on-mac
+
+ - run:
+ name: Run Electron tests
+ environment:
+ MOCHA_REPORTER: mocha-multi-reporters
+ ELECTRON_TEST_RESULTS_DIR: junit
+ MOCHA_MULTI_REPORTERS: mocha-junit-reporter, tap
+ ELECTRON_DISABLE_SECURITY_WARNINGS: 1
+ command: |
+ cd src
+ if [ "$IS_ASAN" == "1" ]; then
+ ASAN_SYMBOLIZE="$PWD/tools/valgrind/asan/asan_symbolize.py --executable-path=$PWD/out/Default/electron"
+ export ASAN_OPTIONS="symbolize=0 handle_abort=1"
+ export G_SLICE=always-malloc
+ export NSS_DISABLE_ARENA_FREE_LIST=1
+ export NSS_DISABLE_UNLOAD=1
+ export LLVM_SYMBOLIZER_PATH=$PWD/third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer
+ export MOCHA_TIMEOUT=180000
+ echo "Piping output to ASAN_SYMBOLIZE ($ASAN_SYMBOLIZE)"
+ (cd electron && node script/yarn test --runners=main --trace-uncaught --enable-logging --files $(circleci tests glob spec-main/*-spec.ts | circleci tests split)) 2>&1 | $ASAN_SYMBOLIZE
+ (cd electron && node script/yarn test --runners=remote --trace-uncaught --enable-logging --files $(circleci tests glob spec/*-spec.js | circleci tests split)) 2>&1 | $ASAN_SYMBOLIZE
+ else
+ if [ "$TARGET_ARCH" == "arm64" ] &&[ "`uname`" == "Darwin" ]; then
+ export ELECTRON_SKIP_NATIVE_MODULE_TESTS=true
+ (cd electron && node script/yarn test --runners=main --trace-uncaught --enable-logging)
+ (cd electron && node script/yarn test --runners=remote --trace-uncaught --enable-logging)
+ else
+ (cd electron && node script/yarn test --runners=main --trace-uncaught --enable-logging --files $(circleci tests glob spec-main/*-spec.ts | circleci tests split))
+ (cd electron && node script/yarn test --runners=remote --trace-uncaught --enable-logging --files $(circleci tests glob spec/*-spec.js | circleci tests split))
+ fi
+ fi
+ - run:
+ name: Check test results existence
+ command: |
+ cd src
+
+ # Check if test results exist and are not empty.
+ if [ ! -s "junit/test-results-remote.xml" ]; then
+ exit 1
+ fi
+ if [ ! -s "junit/test-results-main.xml" ]; then
+ exit 1
+ fi
+ - store_test_results:
+ path: src/junit
+
+ - *step-verify-mksnapshot
+ - *step-verify-chromedriver
+
+ - *step-maybe-notify-slack-failure
+
+ - *step-maybe-cleanup-arm64-mac
+
+steps-test-nan: &steps-test-nan
+ steps:
+ - attach_workspace:
+ at: .
+ - *step-maybe-early-exit-doc-only-change
+ - *step-depot-tools-add-to-path
+ - *step-electron-dist-unzip
+ - *step-setup-linux-for-headless-testing
+ - *step-fix-known-hosts-linux
+ - run:
+ name: Run Nan Tests
+ command: |
+ cd src
+ node electron/script/nan-spec-runner.js
+
+steps-test-node: &steps-test-node
+ steps:
+ - attach_workspace:
+ at: .
+ - *step-maybe-early-exit-doc-only-change
+ - *step-depot-tools-add-to-path
+ - *step-electron-dist-unzip
+ - *step-setup-linux-for-headless-testing
+ - *step-fix-known-hosts-linux
+ - run:
+ name: Run Node Tests
+ command: |
+ cd src
+ node electron/script/node-spec-runner.js --default --jUnitDir=junit
+ - store_test_results:
+ path: src/junit
+
+chromium-upgrade-branches: &chromium-upgrade-branches
+ /chromium\-upgrade\/[0-9]+/
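+# e.g. this matches branches such as "chromium-upgrade/92"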
+
+# Command Aliases
+commands:
+ maybe-restore-portaled-src-cache:
+ parameters:
+ halt-if-successful:
+ type: boolean
+ default: false
+ steps:
+ - run:
+ name: Prepare for cross-OS sync restore
+ command: |
+ sudo mkdir -p /var/portal
+ sudo chown -R $(id -u):$(id -g) /var/portal
+ - when:
+ condition: << parameters.halt-if-successful >>
+ steps:
+ - *step-maybe-restore-src-cache-marker
+ - run:
+ name: Halt the job early if the src cache exists
+ command: |
+ if [ -f ".src-cache-marker" ]; then
+ circleci-agent step halt
+ fi
+ - *step-maybe-restore-src-cache
+ - run:
+ name: Fix the src cache restore point on macOS
+ command: |
+ if [ -d "/var/portal/src" ]; then
+ echo Relocating Cache
+ rm -rf src
+ mv /var/portal/src ./
+ fi
+ checkout-from-cache:
+ steps:
+ - *step-checkout-electron
+ - *step-maybe-early-exit-doc-only-change
+ - *step-depot-tools-get
+ - *step-depot-tools-add-to-path
+ - *step-generate-deps-hash
+ - maybe-restore-portaled-src-cache
+ - run:
+ name: Ensure src checkout worked
+ command: |
+ if [ ! -d "src/third_party/blink" ]; then
+            echo "src cache was not restored for some reason; failing the job"
+ exit 1
+ fi
+ - run:
+ name: Wipe Electron
+ command: rm -rf src/electron
+ - *step-checkout-electron
+ - *step-run-electron-only-hooks
+ - *step-generate-deps-hash-cleanly
+ electron-build:
+ parameters:
+ attach:
+ type: boolean
+ default: false
+ persist:
+ type: boolean
+ default: true
+ persist-checkout:
+ type: boolean
+ default: false
+ checkout:
+ type: boolean
+ default: true
+ checkout-and-assume-cache:
+ type: boolean
+ default: false
+ build:
+ type: boolean
+ default: true
+ use-out-cache:
+ type: boolean
+ default: true
+ restore-src-cache:
+ type: boolean
+ default: true
+ build-nonproprietary-ffmpeg:
+ type: boolean
+ default: true
+ steps:
+ - when:
+ condition: << parameters.attach >>
+ steps:
+ - attach_workspace:
+ at: .
+ - *step-restore-brew-cache
+ - *step-install-gnutar-on-mac
+ - *step-save-brew-cache
+ - when:
+ condition: << parameters.checkout-and-assume-cache >>
+ steps:
+ - checkout-from-cache
+ - when:
+ condition: << parameters.checkout >>
+ steps:
+          # Checkout - Copied from steps-checkout
+ - *step-checkout-electron
+ - *step-check-for-doc-only-change
+ - *step-persist-doc-only-change
+ - *step-maybe-early-exit-doc-only-change
+ - *step-depot-tools-get
+ - *step-depot-tools-add-to-path
+ - *step-get-more-space-on-mac
+ - *step-generate-deps-hash
+ - *step-touch-sync-done
+ - when:
+ condition: << parameters.restore-src-cache >>
+ steps:
+ - maybe-restore-portaled-src-cache
+ - *step-maybe-restore-git-cache
+ - *step-set-git-cache-path
+ # This sync call only runs if .circle-sync-done is an EMPTY file
+ - *step-gclient-sync
+ - store_artifacts:
+ path: patches
+ # These next few steps reset Electron to the correct commit regardless of which cache was restored
+ - run:
+ name: Wipe Electron
+ command: rm -rf src/electron
+ - *step-checkout-electron
+ - *step-run-electron-only-hooks
+ - *step-generate-deps-hash-cleanly
+ - *step-mark-sync-done
+ - *step-minimize-workspace-size-from-checkout
+ - when:
+ condition: << parameters.persist-checkout >>
+ steps:
+ - persist_to_workspace:
+ root: .
+ paths:
+ - depot_tools
+ - src
+
+ - when:
+ condition: << parameters.build >>
+ steps:
+ - *step-depot-tools-add-to-path
+ - *step-setup-env-for-build
+ - *step-setup-goma-for-build
+ - *step-get-more-space-on-mac
+ - *step-fix-sync-on-mac
+ - *step-delete-git-directories
+
+ # Electron app
+ - when:
+ condition: << parameters.use-out-cache >>
+ steps:
+ - *step-restore-out-cache
+ - *step-gn-gen-default
+ - *step-electron-build
+ - *step-ninja-summary
+ - *step-ninja-report
+ - *step-maybe-electron-dist-strip
+ - *step-electron-dist-build
+ - *step-electron-dist-store
+
+ # Native test targets
+ - *step-native-unittests-build
+ - *step-native-unittests-store
+
+ # Node.js headers
+ - *step-nodejs-headers-build
+ - *step-nodejs-headers-store
+
+ - *step-show-goma-stats
+
+ # mksnapshot
+ - *step-mksnapshot-build
+ - *step-mksnapshot-store
+ - *step-maybe-cross-arch-snapshot
+ - *step-maybe-cross-arch-snapshot-store
+
+ # chromedriver
+ - *step-electron-maybe-chromedriver-gn-gen
+ - *step-electron-chromedriver-build
+ - *step-electron-chromedriver-store
+
+ - when:
+ condition: << parameters.build-nonproprietary-ffmpeg >>
+ steps:
+ # ffmpeg
+ - *step-ffmpeg-gn-gen
+ - *step-ffmpeg-build
+ - *step-ffmpeg-store
+
+ # hunspell
+ - *step-hunspell-build
+ - *step-hunspell-store
+
+      # Save all data needed for further test runs.
+ - when:
+ condition: << parameters.persist >>
+ steps:
+ - *step-persist-data-for-tests
+
+ - when:
+ condition: << parameters.build >>
+ steps:
+ - *step-maybe-generate-breakpad-symbols
+ - *step-maybe-zip-symbols
+ - *step-symbols-store
+
+ - when:
+ condition: << parameters.build >>
+ steps:
+ - run:
+              name: Remove the big things on macOS; this seems to be better on average
+ command: |
+ if [ "`uname`" == "Darwin" ]; then
+ mkdir -p src/out/Default
+ cd src/out/Default
+ find . -type f -size +50M -delete
+ mkdir -p gen/electron
+ cd gen/electron
+                  # These files do not seem to like being in a cache; let's remove them
+ find . -type f -name '*_pkg_info' -delete
+ fi
+ - when:
+ condition: << parameters.use-out-cache >>
+ steps:
+ - *step-save-out-cache
+
+ # Trigger tests on arm hardware if needed
+ - *step-maybe-trigger-arm-test
+
+ - *step-maybe-notify-slack-failure
+
+ electron-publish:
+ parameters:
+ attach:
+ type: boolean
+ default: false
+ checkout:
+ type: boolean
+ default: true
+ steps:
+ - when:
+ condition: << parameters.attach >>
+ steps:
+ - attach_workspace:
+ at: .
+ - when:
+ condition: << parameters.checkout >>
+ steps:
+ - *step-depot-tools-get
+ - *step-depot-tools-add-to-path
+ - *step-restore-brew-cache
+ - *step-get-more-space-on-mac
+ - when:
+ condition: << parameters.checkout >>
+ steps:
+ - *step-checkout-electron
+ - *step-gclient-sync
+ - *step-delete-git-directories
+ - *step-minimize-workspace-size-from-checkout
+ - *step-fix-sync-on-mac
+ - *step-setup-env-for-build
+ - *step-setup-goma-for-build
+ - *step-gn-gen-default
+
+ # Electron app
+ - *step-electron-build
+ - *step-show-goma-stats
+ - *step-maybe-generate-breakpad-symbols
+ - *step-maybe-electron-dist-strip
+ - *step-electron-dist-build
+ - *step-electron-dist-store
+ - *step-maybe-zip-symbols
+ - *step-symbols-store
+
+ # mksnapshot
+ - *step-mksnapshot-build
+ - *step-mksnapshot-store
+
+ # chromedriver
+ - *step-electron-maybe-chromedriver-gn-gen
+ - *step-electron-chromedriver-build
+ - *step-electron-chromedriver-store
+
+ # Node.js headers
+ - *step-nodejs-headers-build
+ - *step-nodejs-headers-store
+
+ # ffmpeg
+ - *step-ffmpeg-gn-gen
+ - *step-ffmpeg-build
+ - *step-ffmpeg-store
+
+ # hunspell
+ - *step-hunspell-build
+ - *step-hunspell-store
+
+ # typescript defs
+ - *step-maybe-generate-typescript-defs
+
+ # Publish
+ - *step-electron-publish
+
+# List of all jobs.
+jobs:
+ # Layer 0: Lint. Standalone.
+ lint:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *steps-lint
+
+ ts-compile-doc-change:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-testing-build
+ <<: *steps-electron-ts-compile-for-doc-change
+
+ # Layer 1: Checkout.
+ linux-checkout:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ steps:
+ - electron-build:
+ persist: false
+ build: false
+ checkout: true
+ persist-checkout: true
+ restore-src-cache: false
+
+ linux-checkout-fast:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ steps:
+ - electron-build:
+ persist: false
+ build: false
+ checkout: true
+ persist-checkout: true
+
+ linux-checkout-and-save-cache:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ <<: *steps-checkout-and-save-cache
+
+ linux-checkout-for-native-tests:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_pyyaml=True'
+ steps:
+ - electron-build:
+ persist: false
+ build: false
+ checkout: true
+ persist-checkout: true
+
+ linux-checkout-for-native-tests-with-no-patches:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ GCLIENT_EXTRA_ARGS: '--custom-var=apply_patches=False --custom-var=checkout_pyyaml=True'
+ steps:
+ - electron-build:
+ persist: false
+ build: false
+ checkout: true
+ persist-checkout: true
+
+ mac-checkout:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ <<: *env-testing-build
+ <<: *env-macos-build
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
+ steps:
+ - electron-build:
+ persist: false
+ build: false
+ checkout: true
+ persist-checkout: true
+ restore-src-cache: false
+
+ mac-checkout-fast:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ <<: *env-testing-build
+ <<: *env-macos-build
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
+ steps:
+ - electron-build:
+ persist: false
+ build: false
+ checkout: true
+ persist-checkout: true
+
+ mac-checkout-and-save-cache:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ <<: *env-testing-build
+ <<: *env-macos-build
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
+ <<: *steps-checkout-and-save-cache
+
+ # Layer 2: Builds.
+ linux-x64-testing:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-global
+ <<: *env-testing-build
+ <<: *env-ninja-status
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ steps:
+ - electron-build:
+ persist: true
+ checkout: true
+ use-out-cache: false
+
+ linux-x64-testing-asan:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-global
+ <<: *env-testing-build
+ <<: *env-ninja-status
+ CHECK_DIST_MANIFEST: '0'
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ GN_EXTRA_ARGS: 'is_asan = true'
+ steps:
+ - electron-build:
+ persist: true
+ checkout: true
+ use-out-cache: false
+ build-nonproprietary-ffmpeg: false
+
+ linux-x64-testing-no-run-as-node:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ <<: *env-testing-build
+ <<: *env-ninja-status
+ <<: *env-disable-run-as-node
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ steps:
+ - electron-build:
+ persist: false
+ checkout: true
+ use-out-cache: false
+
+ linux-x64-testing-gn-check:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-testing-build
+ <<: *steps-electron-gn-check
+
+ linux-x64-release:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-release-build
+ <<: *env-send-slack-notifications
+ <<: *env-ninja-status
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ steps:
+ - electron-build:
+ persist: true
+ checkout: true
+
+ linux-x64-publish:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-release-build
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: false
+ checkout: true
+
+ linux-x64-publish-skip-checkout:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-release-build
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: true
+ checkout: false
+
+ linux-ia32-testing:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-global
+ <<: *env-ia32
+ <<: *env-testing-build
+ <<: *env-ninja-status
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ steps:
+ - electron-build:
+ persist: true
+ checkout: true
+ use-out-cache: false
+
+ linux-ia32-release:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-ia32
+ <<: *env-release-build
+ <<: *env-send-slack-notifications
+ <<: *env-ninja-status
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ steps:
+ - electron-build:
+ persist: true
+ checkout: true
+
+ linux-ia32-publish:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-ia32
+ <<: *env-release-build
+ <<: *env-32bit-release
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: false
+ checkout: true
+
+ linux-ia32-publish-skip-checkout:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-ia32
+ <<: *env-release-build
+ <<: *env-32bit-release
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: true
+ checkout: false
+
+ linux-arm-testing:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-global
+ <<: *env-arm
+ <<: *env-testing-build
+ <<: *env-ninja-status
+ TRIGGER_ARM_TEST: true
+ GENERATE_CROSS_ARCH_SNAPSHOT: true
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ steps:
+ - electron-build:
+ persist: false
+ checkout: true
+ use-out-cache: false
+
+ linux-arm-release:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-arm
+ <<: *env-release-build
+ <<: *env-send-slack-notifications
+ <<: *env-ninja-status
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ steps:
+ - electron-build:
+ persist: false
+ checkout: true
+
+ linux-arm-publish:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-arm
+ <<: *env-release-build
+ <<: *env-32bit-release
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True'
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: false
+ checkout: true
+
+ linux-arm-publish-skip-checkout:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-arm
+ <<: *env-release-build
+ <<: *env-32bit-release
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: true
+ checkout: false
+
+ linux-arm64-testing:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-global
+ <<: *env-arm64
+ <<: *env-testing-build
+ <<: *env-ninja-status
+ TRIGGER_ARM_TEST: true
+ GENERATE_CROSS_ARCH_SNAPSHOT: true
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ steps:
+ - electron-build:
+ persist: false
+ checkout: true
+ use-out-cache: false
+
+ linux-arm64-testing-gn-check:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-arm64
+ <<: *env-testing-build
+ <<: *steps-electron-gn-check
+
+ linux-arm64-release:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-arm64
+ <<: *env-release-build
+ <<: *env-send-slack-notifications
+ <<: *env-ninja-status
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm=True --custom-var=checkout_arm64=True'
+ steps:
+ - electron-build:
+ persist: false
+ checkout: true
+
+ linux-arm64-publish:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-arm64
+ <<: *env-release-build
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_arm64=True'
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: false
+ checkout: true
+
+ linux-arm64-publish-skip-checkout:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge-release
+ <<: *env-arm64
+ <<: *env-release-build
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: true
+ checkout: false
+
+ osx-testing-x64:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large
+ <<: *env-testing-build
+ <<: *env-ninja-status
+ <<: *env-macos-build
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
+ steps:
+ - electron-build:
+ persist: true
+ checkout: false
+ checkout-and-assume-cache: true
+ attach: false
+
+ osx-testing-x64-gn-check:
+ <<: *machine-mac
+ environment:
+ <<: *env-machine-mac
+ <<: *env-testing-build
+ <<: *steps-electron-gn-check
+
+ osx-release-x64:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large
+ <<: *env-release-build
+ <<: *env-ninja-status
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
+ steps:
+ - electron-build:
+ persist: true
+ checkout: false
+ checkout-and-assume-cache: true
+ attach: false
+
+ osx-publish-x64:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large-release
+ <<: *env-release-build
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: false
+ checkout: true
+
+ osx-publish-arm64:
+ <<: *machine-mac-large-arm
+ environment:
+ <<: *env-mac-large-release
+ <<: *env-release-build
+ <<: *env-apple-silicon
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: false
+ checkout: true
+
+ osx-publish-x64-skip-checkout:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large-release
+ <<: *env-release-build
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: true
+ checkout: false
+
+ osx-publish-arm64-skip-checkout:
+ <<: *machine-mac-large-arm
+ environment:
+ <<: *env-mac-large-release
+ <<: *env-release-build
+ <<: *env-apple-silicon
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: true
+ checkout: false
+
+ osx-testing-arm64:
+ <<: *machine-mac-large-arm
+ environment:
+ <<: *env-mac-large
+ <<: *env-testing-build
+ <<: *env-ninja-status
+ <<: *env-macos-build
+ <<: *env-apple-silicon
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
+ GENERATE_CROSS_ARCH_SNAPSHOT: true
+ steps:
+ - electron-build:
+ persist: true
+ checkout: false
+ checkout-and-assume-cache: true
+ attach: false
+
+ mas-testing-x64:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large
+ <<: *env-mas
+ <<: *env-testing-build
+ <<: *env-ninja-status
+ <<: *env-macos-build
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
+ steps:
+ - electron-build:
+ persist: true
+ checkout: false
+ checkout-and-assume-cache: true
+ attach: false
+
+ mas-testing-x64-gn-check:
+ <<: *machine-mac
+ environment:
+ <<: *env-machine-mac
+ <<: *env-mas
+ <<: *env-testing-build
+ <<: *steps-electron-gn-check
+
+ mas-release:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large
+ <<: *env-mas
+ <<: *env-release-build
+ <<: *env-ninja-status
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
+ steps:
+ - electron-build:
+ persist: true
+ checkout: false
+ checkout-and-assume-cache: true
+ attach: false
+
+ mas-publish:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large-release
+ <<: *env-mas
+ <<: *env-release-build
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: false
+ checkout: true
+
+ mas-publish-arm64:
+ <<: *machine-mac-large-arm
+ environment:
+ <<: *env-mac-large-release
+ <<: *env-mas-apple-silicon
+ <<: *env-release-build
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: false
+ checkout: true
+
+ mas-publish-x64-skip-checkout:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large-release
+ <<: *env-mas
+ <<: *env-release-build
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ steps:
+ - electron-publish:
+ attach: true
+ checkout: false
+
+ mas-publish-arm64-skip-checkout:
+ <<: *machine-mac-large-arm
+ environment:
+ <<: *env-mac-large-release
+ <<: *env-mas-apple-silicon
+ <<: *env-release-build
+ UPLOAD_TO_S3: << pipeline.parameters.upload-to-s3 >>
+ <<: *env-ninja-status
+ steps:
+ - electron-publish:
+ attach: true
+ checkout: false
+
+ mas-testing-arm64:
+ <<: *machine-mac-large-arm
+ environment:
+ <<: *env-mac-large
+ <<: *env-testing-build
+ <<: *env-ninja-status
+ <<: *env-macos-build
+ <<: *env-mas-apple-silicon
+ GCLIENT_EXTRA_ARGS: '--custom-var=checkout_mac=True --custom-var=host_os=mac'
+ GENERATE_CROSS_ARCH_SNAPSHOT: true
+ steps:
+ - electron-build:
+ persist: true
+ checkout: false
+ checkout-and-assume-cache: true
+ attach: false
+
+ # Layer 3: Tests.
+ linux-x64-unittests:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ <<: *env-unittests
+ <<: *env-headless-testing
+ <<: *steps-native-tests
+
+ linux-x64-disabled-unittests:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ <<: *env-unittests
+ <<: *env-headless-testing
+ TESTS_ARGS: '--only-disabled-tests'
+ <<: *steps-native-tests
+
+ linux-x64-chromium-unittests:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ <<: *env-unittests
+ <<: *env-headless-testing
+ TESTS_ARGS: '--include-disabled-tests'
+ <<: *steps-native-tests
+
+ linux-x64-browsertests:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-2xlarge
+ <<: *env-browsertests
+ <<: *env-testing-build
+ <<: *env-headless-testing
+ <<: *steps-native-tests
+
+ linux-x64-testing-tests:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-headless-testing
+ <<: *env-stack-dumping
+ parallelism: 3
+ <<: *steps-tests
+
+ linux-x64-testing-asan-tests:
+ <<: *machine-linux-xlarge
+ environment:
+ <<: *env-linux-medium
+ <<: *env-headless-testing
+ <<: *env-stack-dumping
+ IS_ASAN: '1'
+ DISABLE_CRASH_REPORTER_TESTS: '1'
+ parallelism: 3
+ <<: *steps-tests
+
+ linux-x64-testing-nan:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-headless-testing
+ <<: *env-stack-dumping
+ <<: *steps-test-nan
+
+ linux-x64-testing-node:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-medium
+ <<: *env-headless-testing
+ <<: *env-stack-dumping
+ <<: *steps-test-node
+
+ linux-x64-release-tests:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-headless-testing
+ <<: *env-send-slack-notifications
+ <<: *steps-tests
+
+ linux-x64-verify-ffmpeg:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-headless-testing
+ <<: *env-send-slack-notifications
+ <<: *steps-verify-ffmpeg
+
+ linux-ia32-testing-tests:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-ia32
+ <<: *env-headless-testing
+ <<: *env-stack-dumping
+ parallelism: 3
+ <<: *steps-tests
+
+ linux-ia32-testing-nan:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-ia32
+ <<: *env-headless-testing
+ <<: *env-stack-dumping
+ <<: *steps-test-nan
+
+ linux-ia32-testing-node:
+ <<: *machine-linux-2xlarge
+ environment:
+ <<: *env-linux-medium
+ <<: *env-ia32
+ <<: *env-headless-testing
+ <<: *env-stack-dumping
+ <<: *steps-test-node
+
+ linux-ia32-release-tests:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-ia32
+ <<: *env-headless-testing
+ <<: *env-send-slack-notifications
+ <<: *steps-tests
+
+ linux-ia32-verify-ffmpeg:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-ia32
+ <<: *env-headless-testing
+ <<: *env-send-slack-notifications
+ <<: *steps-verify-ffmpeg
+
+ osx-testing-x64-tests:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large
+ <<: *env-stack-dumping
+ parallelism: 2
+ <<: *steps-tests
+
+ osx-release-x64-tests:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large
+ <<: *env-stack-dumping
+ <<: *env-send-slack-notifications
+ <<: *steps-tests
+
+ osx-verify-ffmpeg:
+ <<: *machine-mac
+ environment:
+ <<: *env-machine-mac
+ <<: *env-send-slack-notifications
+ <<: *steps-verify-ffmpeg
+
+ osx-testing-arm64-tests:
+ <<: *machine-mac-arm64
+ environment:
+ <<: *env-mac-large
+ <<: *env-stack-dumping
+ <<: *env-apple-silicon
+ <<: *steps-tests
+
+ mas-testing-x64-tests:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large
+ <<: *env-stack-dumping
+ parallelism: 2
+ <<: *steps-tests
+
+ mas-release-tests:
+ <<: *machine-mac-large
+ environment:
+ <<: *env-mac-large
+ <<: *env-stack-dumping
+ <<: *env-send-slack-notifications
+ <<: *steps-tests
+
+ mas-verify-ffmpeg:
+ <<: *machine-mac
+ environment:
+ <<: *env-machine-mac
+ <<: *env-send-slack-notifications
+ <<: *steps-verify-ffmpeg
+
+ mas-testing-arm64-tests:
+ <<: *machine-mac-arm64
+ environment:
+ <<: *env-mac-large
+ <<: *env-stack-dumping
+ <<: *env-apple-silicon
+ <<: *steps-tests
+
+ # Layer 4: Summary.
+ linux-x64-release-summary:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-send-slack-notifications
+ steps:
+ - *step-maybe-notify-slack-success
+
+ linux-ia32-release-summary:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-send-slack-notifications
+ steps:
+ - *step-maybe-notify-slack-success
+
+ linux-arm-release-summary:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-send-slack-notifications
+ steps:
+ - *step-maybe-notify-slack-success
+
+ linux-arm64-release-summary:
+ <<: *machine-linux-medium
+ environment:
+ <<: *env-linux-medium
+ <<: *env-send-slack-notifications
+ steps:
+ - *step-maybe-notify-slack-success
+
+ mas-release-summary:
+ <<: *machine-mac
+ environment:
+ <<: *env-machine-mac
+ <<: *env-send-slack-notifications
+ steps:
+ - *step-maybe-notify-slack-success
+
+ osx-release-x64-summary:
+ <<: *machine-mac
+ environment:
+ <<: *env-machine-mac
+ <<: *env-send-slack-notifications
+ steps:
+ - *step-maybe-notify-slack-success
+
+workflows:
+ version: 2.1
+
+ # The publish workflows below each contain one job so that they are
+ # compatible with how sudowoodo works today. If these workflows are
+  # changed to have multiple jobs, then script/release/ci-release-build.js
+  # will need to be updated and there will most likely need to be changes to
+  # sudowoodo.
+
+ publish-linux:
+ when: << pipeline.parameters.run-linux-publish >>
+ jobs:
+ - linux-checkout
+ - linux-x64-publish-skip-checkout:
+ requires:
+ - linux-checkout
+ context: release-env
+ - linux-ia32-publish-skip-checkout:
+ requires:
+ - linux-checkout
+ context: release-env
+ - linux-arm-publish-skip-checkout:
+ requires:
+ - linux-checkout
+ context: release-env
+ - linux-arm64-publish-skip-checkout:
+ requires:
+ - linux-checkout
+ context: release-env
+
+ publish-x64-linux:
+ when: << pipeline.parameters.run-linux-x64-publish >>
+ jobs:
+ - linux-x64-publish:
+ context: release-env
+
+ publish-ia32-linux:
+ when: << pipeline.parameters.run-linux-ia32-publish >>
+ jobs:
+ - linux-ia32-publish:
+ context: release-env
+
+ publish-arm-linux:
+ when: << pipeline.parameters.run-linux-arm-publish >>
+ jobs:
+ - linux-arm-publish:
+ context: release-env
+
+ publish-arm64-linux:
+ when: << pipeline.parameters.run-linux-arm64-publish >>
+ jobs:
+ - linux-arm64-publish:
+ context: release-env
+
+ publish-osx:
+ when: << pipeline.parameters.run-osx-publish >>
+ jobs:
+ - osx-publish-x64:
+ context: release-env
+
+ publish-mas:
+ when: << pipeline.parameters.run-mas-publish >>
+ jobs:
+ - mas-publish:
+ context: release-env
+
+ publish-osx-arm64:
+ when: << pipeline.parameters.run-osx-publish-arm64 >>
+ jobs:
+ - osx-publish-arm64:
+ context: release-env
+
+ publish-mas-arm64:
+ when: << pipeline.parameters.run-mas-publish-arm64 >>
+ jobs:
+ - mas-publish-arm64:
+ context: release-env
+
+ publish-macos:
+ when: << pipeline.parameters.run-macos-publish >>
+ jobs:
+ - mac-checkout
+ - osx-publish-x64-skip-checkout:
+ requires:
+ - mac-checkout
+ - mas-publish-x64-skip-checkout:
+ requires:
+ - mac-checkout
+ - osx-publish-arm64-skip-checkout:
+ requires:
+ - mac-checkout
+ - mas-publish-arm64-skip-checkout:
+ requires:
+ - mac-checkout
+
+ lint:
+ when: << pipeline.parameters.run-lint >>
+ jobs:
+ - lint
+
+ build-linux:
+ when: << pipeline.parameters.run-build-linux >>
+ jobs:
+ - linux-checkout-fast
+ - linux-checkout-and-save-cache
+
+ - linux-x64-testing
+ - linux-x64-testing-asan
+ - linux-x64-testing-no-run-as-node
+ - linux-x64-testing-gn-check:
+ requires:
+ - linux-checkout-fast
+ - linux-x64-testing-tests:
+ requires:
+ - linux-x64-testing
+ - linux-x64-testing-asan-tests:
+ requires:
+ - linux-x64-testing-asan
+ - linux-x64-testing-nan:
+ requires:
+ - linux-x64-testing
+ - linux-x64-testing-node:
+ requires:
+ - linux-x64-testing
+
+ - linux-ia32-testing
+ - linux-ia32-testing-tests:
+ requires:
+ - linux-ia32-testing
+ - linux-ia32-testing-nan:
+ requires:
+ - linux-ia32-testing
+ - linux-ia32-testing-node:
+ requires:
+ - linux-ia32-testing
+
+ - linux-arm-testing
+
+ - linux-arm64-testing
+ - linux-arm64-testing-gn-check:
+ requires:
+ - linux-checkout-fast
+ - ts-compile-doc-change
+
+ build-mac:
+ when: << pipeline.parameters.run-build-mac >>
+ jobs:
+ - mac-checkout-fast
+ - mac-checkout-and-save-cache
+
+ - osx-testing-x64:
+ requires:
+ - mac-checkout-and-save-cache
+
+ - osx-testing-x64-gn-check:
+ requires:
+ - mac-checkout-fast
+
+ - osx-testing-x64-tests:
+ requires:
+ - osx-testing-x64
+
+ - osx-testing-arm64:
+ requires:
+ - mac-checkout-and-save-cache
+
+ - osx-testing-arm64-tests:
+ filters:
+ branches:
+ # Do not run this on forked pull requests
+ ignore: /pull\/[0-9]+/
+ requires:
+ - osx-testing-arm64
+
+ - mas-testing-x64:
+ requires:
+ - mac-checkout-and-save-cache
+
+ - mas-testing-x64-gn-check:
+ requires:
+ - mac-checkout-fast
+
+ - mas-testing-x64-tests:
+ requires:
+ - mas-testing-x64
+
+ - mas-testing-arm64:
+ requires:
+ - mac-checkout-and-save-cache
+
+ - mas-testing-arm64-tests:
+ filters:
+ branches:
+ # Do not run this on forked pull requests
+ ignore: /pull\/[0-9]+/
+ requires:
+ - mas-testing-arm64
+
+ nightly-linux-release-test:
+ triggers:
+ - schedule:
+ cron: "0 0 * * *"
+ filters:
+ branches:
+ only:
+ - master
+ - *chromium-upgrade-branches
+ jobs:
+ - linux-checkout-fast
+ - linux-checkout-and-save-cache
+
+ - linux-x64-release
+ - linux-x64-release-tests:
+ requires:
+ - linux-x64-release
+ - linux-x64-verify-ffmpeg:
+ requires:
+ - linux-x64-release
+ - linux-x64-release-summary:
+ requires:
+ - linux-x64-release
+ - linux-x64-release-tests
+ - linux-x64-verify-ffmpeg
+
+ - linux-ia32-release
+ - linux-ia32-release-tests:
+ requires:
+ - linux-ia32-release
+ - linux-ia32-verify-ffmpeg:
+ requires:
+ - linux-ia32-release
+ - linux-ia32-release-summary:
+ requires:
+ - linux-ia32-release
+ - linux-ia32-release-tests
+ - linux-ia32-verify-ffmpeg
+
+ - linux-arm-release
+ - linux-arm-release-summary:
+ requires:
+ - linux-arm-release
+
+ - linux-arm64-release
+ - linux-arm64-release-summary:
+ requires:
+ - linux-arm64-release
+
+ nightly-mac-release-test:
+ triggers:
+ - schedule:
+ cron: "0 0 * * *"
+ filters:
+ branches:
+ only:
+ - master
+ - *chromium-upgrade-branches
+ jobs:
+ - mac-checkout-fast
+ - mac-checkout-and-save-cache
+
+ - osx-release-x64:
+ requires:
+ - mac-checkout-and-save-cache
+ - osx-release-x64-tests:
+ requires:
+ - osx-release-x64
+ - osx-verify-ffmpeg:
+ requires:
+ - osx-release-x64
+ - osx-release-x64-summary:
+ requires:
+ - osx-release-x64
+ - osx-release-x64-tests
+ - osx-verify-ffmpeg
+
+ - mas-release:
+ requires:
+ - mac-checkout-and-save-cache
+ - mas-release-tests:
+ requires:
+ - mas-release
+ - mas-verify-ffmpeg:
+ requires:
+ - mas-release
+ - mas-release-summary:
+ requires:
+ - mas-release
+ - mas-release-tests
+ - mas-verify-ffmpeg
+
+ # Various slow and non-essential checks we run only nightly.
+ # Sanitizer jobs should be added here.
+ linux-checks-nightly:
+ triggers:
+ - schedule:
+ cron: "0 0 * * *"
+ filters:
+ branches:
+ only:
+ - master
+ - *chromium-upgrade-branches
+ jobs:
+ - linux-checkout-for-native-tests
+
+ # TODO(alexeykuzmin): Enable it back.
+ # Tons of crashes right now, see
+ # https://circleci.com/gh/electron/electron/67463
+# - linux-x64-browsertests:
+# requires:
+# - linux-checkout-for-native-tests
+
+ - linux-x64-unittests:
+ requires:
+ - linux-checkout-for-native-tests
+
+ - linux-x64-disabled-unittests:
+ requires:
+ - linux-checkout-for-native-tests
+
+ - linux-checkout-for-native-tests-with-no-patches
+
+ - linux-x64-chromium-unittests:
+ requires:
+ - linux-checkout-for-native-tests-with-no-patches