Compare commits


320 Commits

Author SHA1 Message Date
Atemo
357f211a8b Small correction 2022-08-07 17:24:24 +02:00
Atemo
394e3fd6a5 NPC_EMOTION and NPC_EMOTION_ON
* Moved the emotion used by the NPC_EMOTION and NPC_EMOTION_ON skills into a separate field
** "...always,0,9,0x308D,,,,," : the emotion "9" is now saved in emotion2, since this emotion comes from a different table than the "global" emotion
* The corrections to NPC_EMOTION and NPC_EMOTION_ON are reverted; they will be made in the next PR, after the mob skill DB merge
2022-08-07 17:19:41 +02:00
Atemo
cd962a2fda Added cond1 definitions in csv2yaml 2022-08-02 18:23:35 +02:00
Atemo
7a99555013 Small update 2022-07-31 21:01:13 +02:00
Atemo
88cfa8aab9 Converted mob skill DB to YAML 2022-07-29 18:09:04 +02:00
Aleos
97b5f3b4ac Fixes a RENEWAL_CAST compile warning (#7137)
* Fixes a compile warning when RENEWAL is enabled but RENEWAL_CAST is disabled.
Thanks to @Pokye and @Lemongrass3110!
2022-07-28 12:56:04 -04:00
munkrej
84d295e784 Adds new mob skill condition MSC_MOBNEARBYGT (#7130)
* Adds new mob skill condition MSC_MOBNEARBYGT as on official some monsters trigger skills when specific amount of other monsters are nearby.
2022-07-25 11:11:20 -04:00
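A minimal sketch of the MSC_MOBNEARBYGT idea described in the commit above, assuming a simple coordinate check; the structures and radius handling are hypothetical and not rAthena's actual mob AI code.

```cpp
#include <vector>

// Hypothetical position type; rAthena uses its own block_list structures.
struct pos { int x, y; };

// The condition passes when strictly more than cond_value other monsters
// stand within the given radius of the caster ("GT" = greater than).
bool mob_nearby_gt(const pos& self, const std::vector<pos>& other_mobs,
                   int radius, int cond_value) {
    int count = 0;
    for (const pos& p : other_mobs) {
        int dx = p.x - self.x, dy = p.y - self.y;
        if (dx * dx + dy * dy <= radius * radius)
            ++count;
    }
    return count > cond_value;
}
```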
Aleos
322709f78d Adjusts Extended Ammunition default sell prices (#7125)
* Fixes #7098.
* Adjusts the Extended Ammunition NPCs to use the default prices set via the item database.
Thanks to @Badarosk0!
2022-07-23 09:43:33 -04:00
Aleos
8038e71322 Speed up item and mob information commands (#7084)
* Fixes #7081.
* Small refactor of the atcommands: iteminfo, mobinfo, whodrops, and idsearch.
* Item information gathering is now sped up.
* Refactored itemdb_searchname_array to store results in a std::map so that the data is sorted by ID automatically.
* Cleanups across the board to remove extra calls for itemdb_exists().
Thanks to @voyfmyuh, @CairoLee, and @Lemongrass3110!
2022-07-22 15:11:44 -04:00
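A minimal illustration of the std::map refactor mentioned in the commit above: keying the search results by item ID makes iteration come out sorted by ID for free. The struct and function below are illustrative, not rAthena's itemdb code.

```cpp
#include <cstdint>
#include <map>
#include <string>

// Hypothetical item record; not rAthena's item_data.
struct item_info {
    uint32_t nameid;
    std::string name;
};

// Collecting name matches into a std::map keyed by item ID keeps the results
// ordered by ID automatically, so no separate sort pass is needed.
std::map<uint32_t, item_info> search_by_name(const std::string& needle,
                                             const std::map<uint32_t, item_info>& db) {
    std::map<uint32_t, item_info> results;
    for (const auto& [id, item] : db)
        if (item.name.find(needle) != std::string::npos)
            results.emplace(id, item); // iterating results later yields ascending IDs
    return results;
}
```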
Joam
e3cb3fa01f Renewal Physical damage cleanup (#6997)
Corrected bAtkRate item bonus behavior to be the same as official.
Corrected the order of calculation of physical attack.
Enchant Deadly Poison now grants a 25% poison pseudo-elemental bonus; Magnum Break also grants a 20% fire pseudo-elemental damage bonus.
Corrected the Advanced Katar Mastery bonus formula.
Critical attacks now always use the max attack value, like Maximize Power-buffed attacks.
Corrected the item scripts of many items/combos from bAddClass,Class_All to bAtkRate according to official sources.

Co-authored-by: Daegaladh <4557962+Daegaladh@users.noreply.github.com>
Co-authored-by: Aleos <aleos89@users.noreply.github.com>
Co-authored-by: Jittapan Pluemsumran <nightsheep@outlook.com>
2022-07-21 16:29:42 +07:00
Aleos
78b4f4420d Fixes AddEffWhenHit not triggering Magic or Misc (#5628)
* Fixes #5623.
* Fixes an issue with AddEffWhenHit not triggering on Magic or Misc type attacks.
Thanks to @24msz6eo and @Atemo!
2022-07-18 13:46:41 -04:00
Aleos
c2303c8f65 Resolves a mapflag overlap warning with PK Mode (#7109)
* Fixes #7099.
* Hides a warning message when PK Mode is enabled and a Battleground mapflag is attempted to overwrite the PvP mapflag.
Thanks to @LolyAll!
2022-07-18 11:08:44 -04:00
eppc0330
caa5cdd678 Fix displayed attack when missed (#7093)
Fixes the miss display when a unit dodges an attack from a player.
2022-07-17 06:58:46 +07:00
eppc0330
85454cc7ae Minor change of TF_BACKSLIDING behavior (#7094)
Back Slide now only performs its movement when the character is actually moved by the skill.

If a wall is placed behind the character, the skill only shows its casting animation.

Thanks to @Haydrich!
2022-07-17 06:57:38 +07:00
Vincent Stumpf
595fc925e3 Fix an invalid access (#7102) 2022-07-15 20:08:02 -07:00
Vincent Stumpf
ca740549e2 Fix va_arg in getareausers (#7108) 2022-07-15 20:07:40 -07:00
idk-whoami
b2feb2e9b0 Corrected script for some item (#7091)
Fixed #7073
2022-07-15 19:33:12 +02:00
Atemo
6637f0a73c Updated laphine data (#7110) 2022-07-15 19:26:19 +02:00
Atemo
bb16e43d87 Added missing DropEffect for some items (#7107) 2022-07-15 15:00:22 +02:00
idk-whoami
03b40f7101 Corrected script for some item #8 (#7044)
* Fixed item ID 19669, Costume Reginleif Hairband
* Fixed the locations of item IDs 28310 and 28311
2022-07-15 14:58:35 +02:00
Balfear
ec659d576a Added robe for monster avail feature (#7103)
* Added robe to mob avail.
2022-07-14 14:08:29 -04:00
Atemo
f6bc10bfb1 Mob quest target (#7071)
* Added functionality to select monsters that increase the quest counter
* Removed the extra conditions on Id
* Added an example in quest_db.yml for MapMobTargets
* Clean-up the docs
* Added the possibility to define "All" to Location

Thanks to @aleos89 !
2022-07-14 19:00:48 +02:00
Atemo
0fa3e6a672 Fixed a bug with the random options (#7100)
Fixed an issue in the current random option system when the options are re-applied.
2022-07-12 22:32:16 +02:00
Atemo
961951108c Implemented 17.2 items (#7101)
Credits to @crazy-arashi, @JohnnyPlayy and @Everade
2022-07-12 01:34:07 +02:00
Aleos
66c441d765 Fixes itemskill use items being blocked (#7077)
* Fixes #7075.
* Adjusts the skill status check to look for OPT1 flags again instead of cant.cast, as this value is checked later on.
* Resolves items like Fly Wing not working when Silenced.
Thanks to @LadyNanuia!
2022-07-08 08:52:17 -04:00
idk-whoami
8bdff8002c Corrected script for Payon Soldier Card (#7090)
* Fixes #7085.
* Corrected script for Payon Soldier Card.
2022-07-08 08:43:53 -04:00
Daegaladh
15a5bcb2cf Fixed Baphomet Jr. pet Stun resistance (#7079) 2022-07-01 00:30:31 +02:00
Daegaladh
b2cbf263de Fixed MATK rate on pre-re staffs (#7074) 2022-06-30 19:40:04 +02:00
Balfear
79b97bdc51 Auto Spell rate fix (#7068)
* Updates the check for Auto Spell rate values to range from -1000~1000.
Thanks to @Balferian!
2022-06-30 12:40:27 -04:00
eppc0330
56a47cd0a5 Partial revert of #7072 (#7078)
* Baphomet Jr's stun resistance should increase, not decrease.
* Glorious Holy Avenger has been confirmed to give +6 INT.
Thanks to @eppc0330 and @Daegaladh!
2022-06-30 12:38:34 -04:00
Aleos
fe60611892 Fixes some bonus inconsistencies (#7072)
* Fixes #7031.
* Resolves Glorious Holy Avenger giving an extra 6 INT in renewal.
* Also cleans up the refine checks for Glorious Holy Avenger in pre-renewal.
* Baphomet Jr pet should now reduce resistance to stun in renewal.
Thanks to @Melk3000!
2022-06-30 09:55:00 -04:00
Aleos
394dab1237 Cleans up Thief's Double Attack item bonuses (#7070)
* Items that grant TF_DOUBLE now no longer require bDoubleRate.
* Adds pc_checkskill_flag() to return a skill's flag value.
Thanks to @eppc0330 and @secretdataz!
2022-06-28 12:01:57 -04:00
Lemongrass3110
17d6381901 Fixed reloadpcdb (#7065)
Fixes #7063

Thanks to @kaninhot004
2022-06-26 14:16:46 +02:00
Aleos
5e6fd03848 Fixes a potential crash with database reloading (#7061)
* Fixes #7060.
* The cached YAML content will now properly reallocate the memory when the clear command is called.
Thanks to eppc0330!
2022-06-24 15:43:45 -04:00
Kanin Temsrisuk
d173755afc Fixed the item script of Item ID 23012 (#7056) 2022-06-24 16:41:55 +02:00
Aleos
256735a136 Fixes mob and item atcommand sorting issues (#7058)
* Fixes #7055.
* Fixes several atcommands used for mob and item information were not properly accounting for order.
* Resolves an issue where YAML databases were not properly utilizing the cache feature.
* Increased MAX_SEARCH to 10 by default as in renewal there are many more items with duplicated names.
Thanks to @Playtester!
Co-authored-by: Playtester <Kenji.Ito@gmx.de>
2022-06-24 09:53:31 -04:00
Aleos
563a7012ff Weapon Blocking can now trigger on ATK_MISS (#7042)
* Fixes #6886.
* Weapon Blocking is able to trigger on missed attacks.
* Weapon Blocking can still be triggered even if the target has Kyrie, Safety Wall, or other damage nullification statuses.
Thanks to @Atemo!
2022-06-22 09:33:07 -04:00
Daegaladh
96208f28ba Fixed tu_archer NPC not showing dialog (#7050) 2022-06-21 18:27:22 +02:00
Atemo
c93ba77c0c Updated mapcache (#7047)
Added support for Episode 19 maps
2022-06-21 16:55:44 +02:00
idk-whoami
e9292378f1 Fixed Splendid Box2 Type (#7046)
Thanks to @XanKriegor1
2022-06-21 15:50:26 +02:00
Playtester
08edcb5b6b Fixed bosses not being immune to StoneWait (#7040)
- Bosses are now immune to StoneWait again (fixes #7037)
- Improved documentation on the bone_drop config (fixes #7026)
2022-06-18 13:25:51 +02:00
Aleos
a05112b4a2 Allow Skill Damage Adjustments account for reflect (#6999)
* Fixes #4455 and fixes #6274.
* Reflected damage will now take into account if a skill has a modified damage ratio from skill_damage_db.
* Includes the GvG, Battlegrounds, and PK damage config adjustments as well.
Thanks to @cydh and @imaqtdan!
2022-06-17 14:00:45 -04:00
Aleos
7d356806e1 Adds support for Spirit Handler in SQL items (#7041) 2022-06-17 12:06:49 -04:00
Pokye
39d9b12229 Added missing script on some items (#7036)
Fixed #7022

Thanks to @XanKriegor1 !
2022-06-17 17:22:52 +02:00
Kakaroto
68d1af492b Add support for Spirit Handler equip Summoner items (#7035)
* Fixes #7034.
* Added definition for Spirit Handler class to be able to equip the same items as the Summoner class.
Thanks to @admkakaroto!
2022-06-16 14:45:39 -04:00
Daegaladh
eff452fa35 Fixed intimacy reduction for Vanilmirth Self-destruction and S.B.R.44 (#6400)
* Fixed homun intimacy reduction for S.B.R.44 and Self-destruction

* Self-destruction must lower intimacy even if it doesn't hit any enemies

* Fixed intimacy for SBR44

* Added extra checks as suggested by @aleos89

Thanks to @Daraen1, @Lemongrass3110 and @aleos89!

Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-06-16 18:59:02 +02:00
Aleos
73811d10a4 Fixes an issue with instance timer display (#6988)
* Fixes #6835.
* Follow up to ac7292c.
* Instance packet updates were ignored if instances had infinite duration or timeouts.
* Infinite duration or timeout instances now store the cap as INT64_MAX instead of 0.
* Converts the TimeLimit and IdleTimeOut storage types from uint32 to int64 to allow longer 'infinite' duration instances.
Thanks to @samers1 and @Atemo!
2022-06-16 10:49:41 -04:00
Kakaroto
fbbd0d0a8e Minor Expanded Class Job Level Adjust (#7029)
* Small change to fix the expanded classes' job level; the definition was missing, causing the new jobs to have 250 job levels.
Thanks to @admkakaroto!
2022-06-16 10:30:25 -04:00
Aleos
b01cac01c1 Resolves duplicate status calculation on players (#6980)
* Fixes #6930.
* Follow up to a7ee51d.
* Resolves an issue on sc_start where players are doubly calculated on their items and stats.
* Resolves an edge case where AutoSpell3 bonuses can cause a crash if success rate is high or delay is low.
Thanks to @CairoLee!
2022-06-16 10:07:08 -04:00
eppc0330
2992879ee3 Fix SC_POISONINGWEAPON and SC_PYREXIA damage bonus (#7004)
* Poisoning Weapon will now only increase the caster's short weapon damage.
* Pyrexia will now only increase the caster's critical damage and normal attack damage.
Thanks to @eppc0330!
2022-06-15 15:37:29 -04:00
Atemo
12711585a0 Correction of the bonus of Shield of Naga (ID: 2134) (#7027)
Thanks to @Playtester
2022-06-14 16:50:52 +02:00
Atemo
756f9a73a7 Updates item_db using new constants (#7019)
* item_db updated using new constants

Additionally:
* Sorted the IDs in item_db_usable
* Corrected sc_start* for SC_L_LIFEPOTION and SC_S_LIFEPOTION in item_db_usable
* Removed a useless value in item_randomopt_db.yml
* Corrected the option for GRAY_CHARCOAL_MELEE
2022-06-08 21:26:41 +02:00
idk-whoami
7fd27a6c27 Corrected script for some item #7 (#6863)
* Fix Item 1435 & Combo 1631
* Added a bunch of new items
2022-06-08 19:30:31 +02:00
Pokye
9be8b5e591 Store item prices update (#7013)
Updated the prices of items sold in stores; prices are now official according to kRO.
2022-06-08 19:29:01 +02:00
Pokye
dd209f64c7 Small fix on duplicate item (#7018)
Thanks to @devilcrossit
2022-06-08 18:20:16 +02:00
Aleos
aad3a70bbc Fixes script commands unitwalkto/unitwalk (#7007)
* Fixes #5340.
* Resolves an issue where monsters may lose their target when using script commands unitwalkto/unitwalk if they are too slow and the destination is too far.
* Allow script command unitstopwalk to remove the state tracking for script commands unitwalk/unitwalkto.
* Print a warning to the console if a unit is forced to walk again and hasn't yet reached its initial destination.
Thanks to @NeutralDev and @Lemongrass3110!
2022-06-07 15:07:51 -04:00
Pokye
2bb79d0200 Some items from Kachua Secret Key (#6866) 2022-06-07 19:20:16 +02:00
Aleos
2ff76e5060 Adds the CONVERT_ALL macro for YAML tools (#6986)
* Fixes #6813.
* Adds the CONVERT_ALL macro define to the header file.
* Adds a readme.md for the tools directory.
Thanks to @cbrgm and @Lemongrass3110!
2022-06-07 13:19:27 -04:00
Aleos
a7a9b68ed2 Increases the server config character limit (#7006)
* Fixes #5324.
* Adjusts the Login, Character and Map server's ID, password, schema, and codepage character limit from 31 to 1023.
* Adjusts the Login, Character, and Map server's IP character limit from 63 to 1023.
* Removes password echos during failed connections.
Thanks to @reunite-ro!
2022-06-07 11:23:50 -04:00
Aleos
354d2605f2 Corrects Doram healing skill effects (#6978)
* Fixes #6962.
* Tuna Belly and Fresh Shrimp don't apply normal green heal effects.
* Resolves an issue with the HP bars not properly updating for party members as well.
Thanks to @KrokusPokus!
2022-06-07 11:05:37 -04:00
Lemongrass3110
001981cf66 Initial implementation of enchantgrade UI (#6913)
Includes walkscript conversion of kRO scripts

Thanks to @Asheraf, @Balferian, @JohnnyPlayy, @aleos89, @Atemo, @eppc0330 and @Pokye.

Co-authored-by: JohnnyPlayy <lenon32@gmail.com>
Co-authored-by: Asheraf <Asheraf@users.noreply.github.com>
Co-authored-by: Balferian <balfear@yandex.ru>
Co-authored-by: Aleos <aleos89@users.noreply.github.com>
Co-authored-by: Atemo <Atemo@users.noreply.github.com>
2022-06-07 16:45:35 +02:00
mazvi
24cfdc68de Added Arch Mage on Quest magic_books (#7009) 2022-06-07 12:03:21 +02:00
Aleos
038c1778e1 Player skills no longer proc equip break on mobs (#7005)
* Fixes #5906.
* Player skills and items that have the ability to break a target's equipment will no longer "break" a non-player's equipment.
* WS_MELTDOWN (Shattering Strike) is able to bypass the non-player check as it does affect all target types.
* Adds a battle config to toggle the behavior.
Thanks to @Tydus1, @Xelliekins, @Atemo, @Daegaladh, and @Lemongrass3110!
2022-06-06 10:42:42 -04:00
Aleos
479954af30 Fixes Ride In Lightning SP cost (#6998)
* Fixes #6386.
* Applies the new SP cost.
Thanks to @Badarosk0!
2022-06-02 21:12:56 -04:00
Aleos
b804120246 Fixes Battlegrounds and atcommand reloadscript (#7002)
* Fixes #6175.
* Fixes an issue where the Battleground Queue System could become stuck if a GM uses atcommand reloadscript.
Thanks to @roSBK!
2022-06-02 20:38:26 -04:00
Pokye
86c8950e7f Added Beginner Items (#7001)
Fixed #7000

Thanks to @Takiusu !
2022-06-02 15:04:32 +02:00
Atemo
54a3724c70 Added barter npcs to sell/exchange refine ore (#6993)
Thanks to @il3ol2ed @Pokye !
Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-06-02 15:03:29 +02:00
Aleos
4aba94ca01 Updates Poisonous Smoke behavior (#6263)
* Fixes #6199.
* Adjusts the interval to every 2 seconds.
* Adjusts the success rate to 50%.
* Fixes the Poison Weapon effect not applying the 'enemy' status to those inside of Poisonous Smoke.
* Venom Bleed should only be active for 15 seconds, not 5 minutes like the rest of the poisons.
* Pyrexia will now cancel skill cast on each damage input.
Thanks to @Everade!
2022-06-01 14:27:23 -04:00
Aleos
c01c673313 Corrects Finger Offensive and Tiger Cannon (#6996)
* Fixes #6524.
* Finger Offensive will now have 5 hits across all levels.
* Fixes the HP bonus damage formula of Tiger Cannon.
Thanks to @Akaineko-astasi!
2022-06-01 11:33:03 -04:00
Aleos
18050028e3 Adds safety check for Barter Shops (#6995)
* Fixes #6895.
* Adds a safety check for multiple non-stackable items being used as the purchase type.
Thanks to @samers1!
2022-06-01 11:00:48 -04:00
Pokye
0b3cd7f1f7 Fixed some names and added some missing items (#6859) 2022-05-29 22:53:16 +02:00
KrokusPokus
38c03a21d9 Add QuestInfo display for Lost Knife quest in Kunlun (#6969) 2022-05-29 22:50:20 +02:00
Vincent Stumpf
26ec100fa6 Fix RODEX mail weight calculation (#6987)
Fixes #6924
Thanks @voyfmyuh
2022-05-27 08:11:21 -07:00
Aleos
9bf78ee5a3 Clean up to how map cache is loaded by mode (#6984)
* The db/(pre-)re/map_cache.dat files now contain only the maps that differ between the two modes.
* Moves all general maps into db/map_cache.dat for loading across both modes.
* Adds support for the new RSW water height.
* Adds an error message when loading GRFs if the file size is over 2GB.
2022-05-26 09:55:46 -04:00
Aleos
cc93494cf9 Shifts the WoE TE castles to renewal (#6981)
* Moves the WoE FE, WoE Novice, and WoE SE castles to the general database.
* Moves the WoE TE castles to the renewal database to match the loading of the NPC.
2022-05-24 13:43:41 -04:00
Aleos
dacfa6b8c4 Fixes Change Cart not applying style (#6979)
* Fixes #6957.
* Adds a missing flag for SC_PUSH_CART to make sure the display when changing cart styles is sent to the client.
Thanks to @jofvgaming!
2022-05-23 14:16:19 -04:00
Atemo
ae371cb9d0 Corrected BlackSmith quest skills (#6951)
Players who had the quest skills before rebirth could not learn them again after rebirth, or after using a Neuralizer/skill reset.

Thanks to @Balferian !
2022-05-23 19:44:03 +02:00
imlevi
b8c3e885da Fix optional argument not being considered in setbattleflag (#6973) 2022-05-23 12:21:37 -04:00
Atemo
a37b72670c Added getequiprefinerycnt check in an ep17.1 enchant npc (#6952)
* Added/corrected check in an ep17.1 enchant npc

Thanks to @mazvi, @taiga-x
2022-05-23 17:26:21 +02:00
Atemo
4b7fb7d6b4 Added a missing rebellion weapon shop (#6953)
* Corrected a shop type

Thanks to @mazvi
2022-05-23 17:25:32 +02:00
Daegaladh
60813b28f0 Blocked cart item movement on NoUseCart maps (#6970) 2022-05-21 12:05:59 +02:00
Daegaladh
fa363b972d Fixed dead monsters stuck after killed (#5263) 2022-05-20 13:27:26 +02:00
KrokusPokus
667645c2e2 Misc typo fixes (#6963)
Co-authored-by: Gernot Federspiel <ba212908@gmx.net>
2022-05-20 10:36:10 +02:00
Aleos
5cee1cb3b1 Corrects every attack returning at least 1 damage (#6967)
* Follow up to 33a99b5.
* Resolves an issue where normal attacks which have no reflect damage were being capped to at least 1.
Thanks to @XanKriegor1!
2022-05-19 21:27:53 -04:00
Playtester
6bf1b7c2ad Fixed rebirthed monster exp (#6966)
- Fixes #6965 
- When a rebirthed monster is killed it will now only give exp for the HP it was revived with (e.g. if it revived with 40% HP then it will give 40% of its exp on the second kill)
- The first kill will still give 100% exp and drops
2022-05-19 21:40:24 +02:00
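A small sketch of the exp rule stated in the commit above, assuming exp simply scales with the HP fraction the monster was revived with; names are illustrative.

```cpp
#include <cstdint>

// Second-kill exp of a rebirthed monster scales with the revive HP fraction,
// e.g. revived with 40% HP -> 40% of its exp. The first kill is unaffected.
uint64_t rebirth_exp(uint64_t base_exp, uint64_t revive_hp, uint64_t max_hp) {
    if (max_hp == 0)
        return 0;
    return base_exp * revive_hp / max_hp;
}
```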
Aleos
33a99b5993 Fixes reflected damage from overflowing (#6717)
* Fixes #6693.
* Reflected damage should never be negative.
* Reflect Damage no longer has a 'reflect counter'.
* Reflect Damage is removed on log out.
* Reflect Damage Reduction should only reduce in normal cases (outside of WoE/BG).
* Adjusts the variable names to better match their use.
* General cleanups to speed up certain scenarios.
Thanks to @kaninhot004 and @Lemongrass3110's review!
2022-05-19 12:24:34 -04:00
Aleos
2726770c1c Fixes Assumptio status icon (#6960)
* Fixes #6929.
* Resolves an issue with Assumptio utilizing the wrong status icon.
Thanks to @KrokusPokus!
2022-05-18 20:29:47 -04:00
Playtester
f5e75d28ec Fixed looters getting stuck (#6958)
- Fixes #6939
- Looters will now use complex pathing to find a way to an item they can see
- Monsters no longer stop when using NPC_EMOTION or NPC_EMOTION_ON
- Added a security check to prevent endless loops when easy pathing is used (no longer used by default)

Special thanks to @secretdataz.
2022-05-18 23:25:37 +02:00
Aleos
20d9fa6cc7 Fixes Orcish face display (#6956)
* Fixes #6954.
* Resolves an issue where the Orcish face would not display properly after casting SA_REVERSEORCISH.
Thanks to @mimishi!
2022-05-18 10:49:51 -04:00
Aleos
f1b181ec9c Removes SCF_OVERLAPFAIL in favor of Fail list (#6865)
* Removes the SCF_OVERLAPFAIL status flag in favor of adding the status to the Fail list.
* Confirmed that Burning does renew itself.
2022-05-17 16:45:07 -04:00
Playtester
d9ae335edf Official client range distance offset (#6950)
- Fixed the client range distance offset which is really 1.1 and not 1.0625
- This fixes the issue where sometimes when you tried to use a skill nothing happened: the client didn't make you move closer, but the server said you were not in range yet
- I originally got an incorrect value through testing because I didn't know that the client calculates range in 3D space rather than in 2D; I have now retested on a completely flat map, so the value is fully confirmed and official
- See also #6949
2022-05-17 18:47:31 +02:00
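A hedged illustration of the measurement issue described in the commit above: if range is measured in 3D (including cell height differences), a sloped map inflates the apparent offset, which is how a wrong value was originally derived instead of 1.1. The helpers below are purely illustrative, not rAthena's range check.

```cpp
#include <cmath>

// 3D distance as the client computes it (per the commit), versus the flat 2D case.
double distance_3d(double dx, double dy, double dz) {
    return std::sqrt(dx * dx + dy * dy + dz * dz);
}

// Illustrative range check with the corrected client offset of 1.1.
bool within_client_range(double dx, double dy, double dz, int skill_range) {
    const double CLIENT_RANGE_OFFSET = 1.1; // was incorrectly measured as 1.0625
    return distance_3d(dx, dy, dz) <= skill_range + CLIENT_RANGE_OFFSET;
}
```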
Aleos
b857e2749a Resolves some statuses ending early from items (#5817)
* Partial fix for #5766.
* Auto Guard, Ruwach, or Angelus from items should stay active when the item is removed.
Thanks to @mrjnumber1!
2022-05-17 12:41:25 -04:00
Atemo
a8276339bd Updated the script to use the commands cloakonnpcself/cloakoffnpcself (#6946) 2022-05-16 16:17:36 +02:00
Playtester
d3cc0c5656 Updated pre-renewal mob_db for Brasilis monsters (#6945)
* Updated Brasilis monsters in pre-re/mob_db.yml to pre-renewal jRO stats (jRO has higher ATK than bRO)
* There never was an official pre-re kRO version because kRO updated to renewal before adding Brasilis, but having all the common values like aDelay/aMotion and drop order official makes it easier to update these monsters via /Import/
* Will provide an import file to revert to bRO ATK here: https://rathena.org/board/topic/80992-official-pre-renewal-brasilis/

Special thanks to @Daegaladh for providing additional data to cross check these values.
2022-05-16 10:53:18 +02:00
Daegaladh
2bb740115e Renamed NoMineEffect mapflag to ForceMinEffect to clarify its behavior (#6940) 2022-05-14 13:20:10 +02:00
Playtester
b0b5bfd055 Fixed Steel Body not being removed on logout (#6938)
- Fixes #6910
2022-05-11 23:11:14 +02:00
Lemongrass3110
3298ca9eb7 Added roulette window state tracking (#6936)
Fixes #6931

Thanks to @mazvi, @aleos89 and Ziu
2022-05-11 08:50:05 +02:00
Playtester
a027900711 Fixed range of NPC_HELLPOWER and AM_RESURRECTHOMUN
- Follow-up to e9ead1f
2022-05-10 21:25:41 +02:00
Atemo
e6f8e7549d Added cloakonnpcself/cloakoffnpcself script commands (#6934) 2022-05-10 17:59:01 +02:00
Aleos
0615b843d7 Removes the 100% rate cap for SC defense (#6926)
* Status change defense is simply not capped at 100%.
* Adjusts to utilize util::safe_addition_cap.
Thanks to @Playtester!
2022-05-09 17:12:59 -04:00
Playtester
cfa9776a28 Fixed Grand Cross and Grand Darkness (#6927)
- Fixes #6647 (b)
- Related to #1140
- Follow-up to ce4aed2
- Re-implemented Grand Cross / Grand Darkness
- Both skills can hit up to 4 times
- Each cell of both skills will stop dealing damage if it dealt damage at least 3 times
- The interval is now fixed to 300ms regardless of how many targets are on a single cell
2022-05-09 19:14:48 +02:00
Lemongrass3110
252e873e98 Update packets to Hercules v2022.04.07 (#6933)
Syncs the packet structures to HerculesWS/Hercules@9d0de90

Thanks to @4144 and @Asheraf for maintaining it.
2022-05-09 18:08:55 +02:00
eppc0330
9acb50006a Fix equip order (#6876)
* Fixes equip order
* Accessories now prioritize the left side of the equipment window (if both slots are occupied)
* Weapons now prioritize the left side (right hand) in renewal
* Weapons still prioritize the right side (shield hand) in pre-renewal
2022-05-07 17:48:20 +02:00
Daegaladh
c200e67ae2 Fixed targeting on duels (#6918) - Followup to 01afe46 2022-05-07 15:26:32 +02:00
Playtester
94c80a9108 SC_HALLUCINATION now shows damage even when blocked (#6920)
- When under the HALLUCINATION status change you will now also see damage when the damage was reduced to 0
- This includes damage being reduced to 0 by e.g. Pneuma or Safety Wall
- Documented under #6790 (but does not fix the original report which I can't reproduce)

Note: Modern clients handle the display of random damage themselves, so you will see higher damage numbers than what clif_hallucination_damage() returns. It still requires returning a number higher than 0 for the damage to display.
2022-05-04 23:08:30 +02:00
Playtester
e9ead1fcb7 Fixed skill ranges of NPC and HOM skills (#6917)
- Fixes #6884
- Updated skill ranges of NPC and Homunculus skills to their official values
- Implemented the official skill selection mechanic where when a mob picks a skill but can't cast it due to range, it will not check for any skills below
- Added a configuration to reset this to the previous behavior
2022-05-04 19:04:22 +02:00
Aleos
8bd77496da Fixes atcommand reloadatcommand (#6891)
* Fixes #6874.
* Removes an extra config file check that is no longer needed.
Thanks to @reunite-ro!
2022-05-03 23:13:28 -04:00
Aleos
972ada7c4a Fixes unlimited supply market shops (#6889)
* Fixes #6883.
* Follow up to 6ccf153.
* Resolves a final market shop loading check resetting items marked as unlimited supply to a specific amount if they were previously saved in the SQL table as such.
Thanks to @gravity-ro!
2022-05-03 22:49:50 -04:00
Aleos
86cca12399 Fixes SC_HOMUN_TIME starting too early (#6890) 2022-05-03 22:35:19 -04:00
Playtester
b25fda1370 Added HP/SP bonus from VIT/INT equips (#6914)
- Fixes #6911
- Each VIT on an equipment gives an additional +1 MaxHP
- Each INT on an equipment gives an additional +1 MaxSP
- Bonus is applied before % equip bonus
- Only applies to VIT/INT bonuses directly on the equipment itself, not to cards
- Replaced some hard-coded values with constants

Special thanks to @aleos89 and @secretdataz
2022-05-03 09:42:49 +02:00
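A worked sketch of the order of operations described in the commit above: the flat +1 MaxHP per equip VIT is added first, then the percentage equip bonus applies on top. Variable names are illustrative, not rAthena's status_calc code; the same pattern applies to INT and MaxSP.

```cpp
#include <cstdint>

// Flat +1 MaxHP per point of VIT granted directly by equipment (not cards),
// applied before the % MaxHP equip bonus.
uint32_t maxhp_with_equip_bonus(uint32_t base_maxhp, int equip_vit, int equip_hp_percent) {
    uint32_t hp = base_maxhp + static_cast<uint32_t>(equip_vit); // flat bonus first
    hp = hp * (100 + equip_hp_percent) / 100;                    // then the % bonus
    return hp;
}
```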
Playtester
fe7626dfdc Follow-up to 64f10ed
Just fixed a typo.
2022-05-02 16:33:01 +02:00
Playtester
64f10ed10b Fixes HPFactor being way too high (#6909)
- Fixes #6908
- HpFactor default changed from 20000 to 0
- Renamed HPMultiplicator to HpIncrease and SPFactor to SpIncrease so the naming is more consistent
- Fixed an issue with case-sensitivity (HP/SP -> Hp/Sp)
- YAML version of JOB_STATS is now 2
- "csv2yaml" and "yamlupgrade" converters updated accordingly
- Improved documentation

Special thanks to @Lemongrass3110
2022-05-02 13:50:09 +02:00
Atemo
78bf9f72c8 Updated ticket_refiner.txt (#6905)
Support for more certificates.

Thanks to @AsurielRO !
2022-05-02 00:52:28 +02:00
Playtester
5ec5f4891c Fixed status change chance from items being too high (#6907)
- Fixes #6906
- Chance was too high by factor 100
2022-05-01 09:30:41 +02:00
Playtester
c127d2875b Fixed MIN_MOBLINKTIME not working (#6904) 2022-05-01 08:29:58 +02:00
Playtester
5181c70626 Improved chasing behavior (#6903)
- Fixes #6899
- Monsters will now always walk to the end of their chase path before checking for their target again (you can still change this by changing monster_chase_refresh in monster.conf)
- A monster's chase range is now exactly as defined in mob_db.yml except when it was just attacked
- A just-attacked monster's chase range is now only enhanced until it reaches its original target cell
- Ranged monsters will now always stop when their target is in attack range, even if they still have attack delay
- Fixed a small math error when calculating chase path
- When a monster loses its target, it will now always spread to an empty cell when it was chasing something and reaches the end of its chase path, but will no longer spread when it was already attacking the target that disappeared (it will still spread if you set the custom mob_ai setting for this)
- Fixed monsters not starting to chase a target while they are randomly walking

Thanks to @aleos89 for support.
2022-04-29 21:34:39 +02:00
Everade
e56977b5f2 Fixes NC_REPAIR item consumption (#6898)
Fixes NC_REPAIR consuming multiple repair items on all skill levels due to missing level definitions.
2022-04-28 20:00:25 +02:00
Aleos
3b9c28aa46 Minor SC_STONEWAIT fixes (#6892)
* The SC_STONEWAIT status should not be applicable to the Undead element.
* Fixes the overall duration for SC_STONEWAIT and SC_STONE because of incorrect order of resistance application.
* Adds the delay parameter to sc_start functions since some statuses, if not most, have a delay of some sort before being applied to a target.
* Swaps the duration values for Stone Curse related skills.
Thanks to @Playtester!
2022-04-27 12:40:38 -04:00
Vincent Stumpf
ef6a682ca8 Fix possible crash in TypesafeCachedYamlDatabase (#6873) 2022-04-26 19:56:31 +07:00
Atemo
fe7dd38773 Removed renewal exp and drop penalty in Illusions dungeons (#6887) 2022-04-25 19:38:21 +02:00
Atemo
ceb1d10c20 Removed NoRemoveOnDead flags on S_Lifepotion and L_Lifepotion (#6888) 2022-04-25 19:37:27 +02:00
mazvi
5d34c7b16b Illusion of Vampire Boss Spawn Fixes (#6875)
Fixes an exploit which allowed spawning Berserk Bomi indefinitely after the first quest completion.

Thanks to @Everade @Atemo
2022-04-25 02:06:12 +02:00
Aleos
ec3a287831 Resolves a compile warning (#6868) 2022-04-23 07:10:41 -04:00
Aleos
ea4a9d9e2f Fixes a quest packet buffer issue (#6870) 2022-04-23 01:30:53 -04:00
Aleos
3c36814c1f Adjusts swapping equipment behavior (#6869) 2022-04-22 17:15:43 -04:00
Aleos
d617d9f083 Updates SC_CHANGEUNDEAD behavior (#6867)
* Fixes #6834.
* Versus Players
- Animation will be properly displayed for Blessing/Increase Agility when the target has Change Undead active (buffs are not applied even though animation is displayed).
- Target can no longer be killed through the single damage applied by Blessing/Increase Agility and Change Undead.
- If the target has Curse and Stone active, only Curse is removed by Blessing first (buffs are not applied).
- Shadow or Undead armor have no impact on Blessing or Increase Agility at all.
* Versus Monsters
- Blessing is applied normally to the target as long as it's not an Undead element or Demon race.
- Blessing does not cancel out Curse or Stone.
Thanks to @Playtester!
2022-04-22 11:46:28 -04:00
Lemongrass3110
b42dbe4a9f Fixed RES/MRES for players (#6857)
* Fixes #6841.
Co-authored-by: Playtester <Kenji.Ito@gmx.de>
2022-04-22 09:42:00 -04:00
Aleos
53bc2376a6 Updates stacking for common statuses (#6807)
* Fixes #6798.
* Updates the Fail, End, and EndReturn lists for OPT1 and OPT2 statuses.
* Removes the hardcoded OPT1 overwrite prevention check.
* OPT1 that have RemoveOnDamaged flag should not get applied again in the same attack.
* Fixes Stone status not properly being inflicted by the bAddEff, bAddEff2, bAddEffWhenHit, and bAddEffOnSkill item bonuses.
* Fixes Stone status not properly being inflicted by The Hanged Man from Tarot Card of Fate.
Thanks to @Playtester!
2022-04-21 09:31:27 -04:00
mazvi
b17b0c7a0b Fixes Token of Honor Barter shops for EP16.1/16.2/17.1 (#6852)
Fixes Barter quests_16_1.yml, quests_16_2.yml, quests_17_1.yml

Co-authored-by: Everade <Everade@users.noreply.github.com>
2022-04-20 14:36:48 +02:00
Everade
20422622aa Illusion of Vampire monster fixes (#6851)
Fixes https://github.com/rathena/rathena/issues/6849
Corrected monster mode and skills for Black Mushroom

Thanks to @mazvi
2022-04-19 15:10:15 +02:00
Daegaladh
3641dcf426 Fixed exp rewards in EP 17.1 quests (#6850) 2022-04-18 16:59:54 +02:00
Pokye
d7e114179d Some Renewal Mobs Fixes (#6846)
Updated MvpExp to be 50% of BaseExp
Corrected some AegisNames and Names
2022-04-18 02:41:03 +02:00
Pokye
3650a7f805 Some item name fixes (#6816) 2022-04-17 23:13:58 +02:00
Lemongrass3110
aae930198d Fixed SQL db loading (#6843) 2022-04-17 22:31:57 +02:00
Lemongrass3110
1a7a26bc39 Added speed bonus to Serenade of Jawaii (#6842) 2022-04-17 21:32:27 +02:00
Aleos
1aaa32bd1a Minor Adjustment to Expanded Class skills (#6829)
* Kihop now applies its physical damage bonus directly, no longer depending on the number of party members. Physical damage bonus: (10 + (15 x skill level))%.
* Catnip Meteor:
- Reduces fixed casting time from 3 seconds to 1.5 seconds.
- Reduces variable casting time from 4 seconds to 2 seconds.
- From base level 100, deals additional damage depending on the user's base level and INT.
- Adjusts splash to 7x7 for all levels.
* Lunatic Carrot Beat now deals additional damage (from base level 100) depending on user's base level and STR.
2022-04-17 14:57:38 -04:00
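A one-line sketch of the Kihop formula quoted in the commit above; purely illustrative.

```cpp
// Flat physical damage bonus of (10 + 15 * skill level) percent,
// independent of party size, e.g. level 5 -> 85%.
int kihop_damage_bonus_percent(int skill_lv) {
    return 10 + 15 * skill_lv;
}
```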
Atemo
ce7fc06692 Fixes 17.1 Enchanter (#6840)
* Replaced consumeitem by getgroupitem

Fixes #6838
2022-04-17 19:58:52 +02:00
Everade
696320fc49 Episode 16.2 - Illusion of Vampire (#6800)
Fixes https://github.com/rathena/rathena/issues/6455
Implements Episode 16.2 - Illusion of Vampire

Thanks to @Atemo 

Co-authored-by: Atemo <capucrath@gmail.com>
2022-04-16 17:20:53 +02:00
Lemongrass3110
51ddc63a02 Converted player groups to YAML (#6488)
Finally adds import functionality for player groups, and therefore for atcommand and permission definitions.

Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-04-15 23:47:44 +02:00
Playtester
6e5461a34e Follow-up to da93c19 (NPC_CHANGEUNDEAD)
- NPC_CHANGEUNDEAD is now also working in renewal
2022-04-15 17:41:01 +02:00
Playtester
da93c190c5 Correct Implementation of NPC_CHANGEUNDEAD (#6837)
- Fixes #6832
- NPC_CHANGEUNDEAD is now a single target attack skill
- 100% physical, undead property damage
- When it deals damage, 10%*skill_lv chance to cause SC_CHANGEUNDEAD for 60 seconds
- Skill range is 2
- All NPC status skills now have 120% hit rate
2022-04-15 09:00:53 +02:00
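A minimal sketch of the proc chance and duration listed in the commit above; the RNG parameter is a stand-in, not an rAthena function.

```cpp
// On dealing damage, NPC_CHANGEUNDEAD has a 10% * skill_lv chance to inflict
// SC_CHANGEUNDEAD for a fixed 60 seconds. roll_0_99 is a hypothetical 0-99 roll.
bool try_inflict_changeundead(int skill_lv, int roll_0_99, int& duration_ms) {
    duration_ms = 60 * 1000;              // 60 second duration
    return roll_0_99 < 10 * skill_lv;     // e.g. level 5 -> 50% chance
}
```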
Lemongrass3110
4845a128ba Added more laphine synthesis items (#6823)
Adds support for 52 more laphine synthesis items.

Thanks to @Badarosk0
2022-04-14 15:42:34 +02:00
Lemongrass3110
6e58ab7d40 Disabled random options for stolen items (#6836)
Added a battle config to enable/disable random options for stolen items.

Fixes #6833
2022-04-14 15:21:49 +02:00
Lemongrass3110
14d6052e5b Fixed some 4th class ex issues (#6806)
Added all required skill and skill unit constants - thanks to @Rytech2
Fixed the order of some jobchange checks
Added missing MAPID to JOBID conversion
Fixed pc_calc_skilltree_normalize and added support for gaps in the job tree
2022-04-14 15:04:02 +02:00
Aleos
f083caf28d Fixes Status Database All CalcFlag (#6826)
* Fixes #6812.
* Resolves an issue with the All CalcFlag not properly being parsed for the Status Database, resulting in these statuses not starting and ending their bonuses.
Thanks to @eppc0330!
2022-04-13 13:37:40 -04:00
idk-whoami
a7fbcfa1f8 Fixed Crown of Deceit Script (#6830)
Fixes #6809

Thanks to @Leemonn
2022-04-13 15:10:33 +02:00
Everade
ff7bbb9d0c Episode 17.1 - Illusion (#6643)
Fixes https://github.com/rathena/rathena/issues/6463
Implements Episode 17.1 - Illusion

Adds viewpointmap script command
Adds getbaseexp_ratio and getjobexp_ratio script command

Comments out quest log support for the Nameless Island Quest, Nameless Island Access Quest and Geoborg Family Curse Quest because they use quest IDs that are already used elsewhere.


Credits to @crazy-arashi for the initial content release

Co-authored-by: crazy-arashi <meganekiho@gmail.com>
Co-authored-by: aleos <aleos89@users.noreply.github.com>
Co-authored-by: Atemo <capucrath@gmail.com>
Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>

Thanks to @crazy-arashi  @Atemo  @aleos89  @Lemongrass3110  @secretdataz  @Dia  @humanwizzard  @idk-whoami  @Megelith  @Daegaladh
2022-04-13 11:23:48 +02:00
Lemongrass3110
8bc43091c9 Fixed DK_CHARGINGPIERCE display
Thanks to @limitro
2022-04-11 22:37:05 +02:00
Pokye
f4410fba6b Added script command openbank (#6821) 2022-04-11 01:56:17 +02:00
Singe Horizontal
2cde45e569 Fixed debugx64 compilation in MSVS (#6819)
Fixes #6817
2022-04-10 12:32:56 +02:00
Vincent Stumpf
9c80c24976 Fix common compilation (#6814)
Fixes #6811
2022-04-09 14:32:15 -07:00
idk-whoami
4592ae6c09 Corrected script for some item #6 (#6745) 2022-04-09 14:52:02 +02:00
Aleos
cee161e677 Implements SC_STONEWAIT (#6794)
* Fixes #6748.
* Implements SC_STONEWAIT to be used with OPT1_STONEWAIT.
* Removes a lot of hard coded OPT1_STONE checks now that the two states are split to their own statuses.
* Fixes SC_STONE not ending when the target receives damage.
* Fixes SC_STONE getting overwritten by other statuses that have OPT1 states.
* SC_STONE should damage the target by 1% of MaxHP, not CurrentHP.
* All skills that give StoneWait aside from Stone Curse are 100ms.
* Confirmed that Sienna Execrate is 8 + 2 * SkillLv seconds of Stone.
* Moves Provoke and Mind Breaker status clearings to the status database.
* Converts EndReturn in the status database from a boolean to a list of statuses. Statuses defined in this list will be ended and then the status making the call will not take effect.
Thanks to @Singe-Horizontal, @Playtester, and @Lemongrass3110!
2022-04-08 13:16:18 -04:00
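Two of the timings confirmed in the commit above, written out as a small sketch; illustrative only, not the status database entries themselves.

```cpp
// StoneWait delay before Stone sets in: 100ms for every source except Stone Curse.
int stonewait_delay_ms() {
    return 100;
}

// Sienna Execrate Stone duration: 8 + 2 * SkillLv seconds.
int sienna_execrate_stone_ms(int skill_lv) {
    return (8 + 2 * skill_lv) * 1000;
}
```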
CairoLee
3b47defa35 Fixed ItemCost is invalid when specified level (#6796)
* Fixed #6573.
* Fixes ItemCost being invalid when specifying level.
Thanks to @dimasshotta and @CairoLee!
2022-04-08 11:26:38 -04:00
Lemongrass3110
ff3b731951 Fixed compilation for PACKETVER 20100303 (#6804)
Fixes #6803

Thanks to @blurryrox
2022-04-07 22:47:52 +02:00
Aleos
41da8646ff Fixes an issue with Joint Beat resistance (#6805)
* Follow up to cb2c553.
* Fixes a math error in the status resistance and duration for Joint Beat.
Thanks to @Tokeiburu and @Playtester!
2022-04-07 14:41:15 -04:00
Aleos
2993c0baa6 Updates the status resistances for renewal (#6764)
* Fixes #4694.
* Updates the status resistance formulas for several of the common statuses.
* Updates a few skills to their appropriate durations.
* When the caster is a higher base level, some statuses now take this difference into effect.
* Item resistances are also applied to the duration.
* Fixes Chaos Panic not reading the skill database duration.
* Points Bash's Stun duration to itself and not Fatal Blow.
* Adjusts SC_CURSE to look up the duration from NPC_WIDECURSE as it's set to the default 18 seconds.
* Burning will fail if the target is under the effect of White Imprison.
* Burning will fail on level 2 Fire targets.
Thanks to @Playtester!
2022-04-06 10:53:32 -04:00
Aleos
a7ee51da7d Fixes statuses with SCB_ALL not recalculating (#6793)
* Fixes #6784.
* Adds a missing call for statuses that give the "All" CalcFlags.
Thanks to @eppc0330!
2022-04-04 11:03:21 -04:00
Lemongrass3110
113cb2f066 Removed duplicate item 22814
Fixes #6792

Thanks to @kaninhot004
2022-04-04 16:01:50 +02:00
Aleos
436e5a6f7d Fixes SQL mob database parsing (#6789)
* Fixes #6787.
* Resolves the SQL monster database not being properly parsed into YAML nodes.
* Also resolves the skill database improperly reporting issues.
Thanks to @L4M3573R and @Lemongrass3110!
Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-04-03 19:14:00 -04:00
Playtester
5df808d5fd Restored chance of NPC_PETRIFYATTACK, see issue #1811 2022-04-03 23:24:05 +02:00
Lemongrass3110
01261dbf3d Cleanup after RYML merge (#6785)
Sped things up even more by making use of C++ references.
Fixed #6770 warning while at it.
Added back support for case insensitive booleans.
Fixed a very rare issue with cached databases.
Removed .children() calls
Added back type safety reports
Added ryml to login-server makefile

Thanks to @aleos89, @Atemo and @idk-whoami
2022-04-03 04:02:13 +02:00
Atemo
b3343df2ab Added imperial set barter shop (#6783) 2022-04-02 21:55:39 +02:00
Aleos
163f8c2c35 Fixes error reporting with SQL and ryml (#6778)
* Fixes #6768.
* Send 0 as the parse line in SQL mode for item scripts as the ryml parser isn't fully populated in SQL mode.
* Also cleans up some documentation references for EquipScript and UnEquipScript.
Thanks to @dralucon87 and @Lemongrass3110!
2022-04-02 12:44:30 -04:00
Lemongrass3110
185344a252 Fixed sell price calculation (#6775)
Fixes #6773
Additionally added some MAX_ZENY checks.

Thanks to @mazvi
2022-04-02 18:36:47 +02:00
mazvi
945a5cd993 Fix Logic Angry Moonlight Flower Summon Illusion (#6777)
Fix Logic Angry Moonlight Flower Summon Illusion
2022-04-02 16:51:47 +02:00
Aleos
47cb6dc637 Minor corrections to Break Through (#5917)
* Fixes the attack bonus not properly applying to masteries.
* Fixes the healing bonus not increasing per skill level learned.
2022-04-02 08:19:28 -04:00
Playtester
3206054205 Fixed refinement success and def for pre-re (#6782) 2022-04-02 14:42:56 +07:00
Atemo
aa81411c60 Added missing Noblesse exchange npc (#6761)
* Added missing Noblesse exchange npc
2022-04-01 23:01:31 +02:00
Playtester
5eb0a2ea32 Fixed small rounding issue with Divine Protection (#6781)
* Fixes #6779
* The reduction will now be calculated as float and then rounded
2022-04-01 22:40:21 +02:00
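A small sketch of the rounding fix described in the commit above, assuming a reduction computed from the skill level: the value is computed as a float and rounded to the nearest integer rather than truncated by integer division. The formula shown is a placeholder, not the real Divine Protection formula.

```cpp
#include <cmath>

// Compute the reduction in floating point, then round to nearest instead of
// letting integer division truncate. per_level is a placeholder coefficient.
int divine_protection_reduction(double per_level, int skill_lv) {
    double value = per_level * skill_lv;
    return static_cast<int>(std::lround(value));
}
```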
CairoLee
2172bb835d Update the comment of Log System and Fixed stylist logging (#6776) 2022-04-01 09:45:01 +02:00
HAO YAN
a2a614198a Update RE Renter NPC (#6750)
Include 4th job classes
Adjusted header and added changelog

Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-03-31 23:29:15 +02:00
Cydh Ramdh
16ccad42e8 Update vending and buyingstore on warp (#6718)
Fixed #6671: vendor & buyer locations were never updated when the location changed
Added MF_NOBUYINGSTORE and CELL_NOBUYINGSTORE to separate from MF_VENDING & CELL_NOVENDING
Added some missing check for buyingstore states
Updated mapflag doc
Added new constants

Co-authored-by: Aleos <aleos89@users.noreply.github.com>
Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-03-31 23:08:35 +02:00
eppc0330
e4e8063ec3 Update Renewal Elemental Atrribute Damage (#6765) 2022-03-31 22:43:52 +02:00
Lemongrass3110
db540124ad Fixed some LGTM warnings (#6767)
Co-authored-by: aleos <aleos89@users.noreply.github.com>
2022-03-31 22:20:17 +02:00
Playtester
0ef5fa905c Fixed Pre-Re SoftDEF Formula (#6766)
* Fixed Pre-Re SoftDEF Formula
* Fixes #6648

Note: This does not fix the order of processing, which is also wrong, but at least the damage is official now in 1vs1 combat when no item bonuses or status changes that directly impact DEF are involved.
2022-03-31 18:09:53 +02:00
Lemongrass3110
80b133c08f Fixed a typo in configure
Thanks to @Pokye
2022-03-31 02:10:49 +02:00
Jittapan Pluemsumran
d1b7061f5a YAML loading optimization (#5997)
* Use rapidyaml library to parse YAML databases instead of yaml-cpp.
* Drastically reduces the parse time for yaml databases.
* Removes yaml-cpp content from main servers, except for tool emitter.
Co-authored-by: Vincent Stumpf <vincents.995@gmail.com>
Co-authored-by: Atemo <capucrath@gmail.com>
Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-03-30 16:38:52 -04:00
Aleos
9bc1c53db4 Fixes Elemental Converter behavior (#6743)
* Fixes #6627.
* Elemental Converters will now be removed on weapon unequip/swap.
* Adds RemoveOnUnequip, RemoveOnUnequipWeapon, and RemoveOnUnequipArmor status flags and dehardcoded statuses to utilize these flags.
* Hovering Booster's cancellation of SC_HOVERING now takes place in the UnEquipScript, releasing the hard coded check.
Thanks to @Everade, @Daraen1, @Toshiro90, and @Lemongrass3110!
2022-03-30 10:19:27 -04:00
Playtester
414d43dd05 Natural recovery formula and interval behavior (#6755)
* Implemented official natural recovery formula and interval behavior (fixes #6754)
* Fixed HP recovery per tick being 1 too high (after reaching 200 HP)
* The interval will now work similar to official servers where it remembers the time of the last recovery and checks if the interval has passed since that time
* The natural recovery interval will now be continuous even when at full health (i.e. if you are full while the interval passes and afterwards you lose HP/SP, you will have to wait for the next interval)
* Fixed anything blocking recovery, such as walking, not resetting the natural recovery tick (e.g. you have to wait 6 seconds to recover HP after you stop walking, unless you have moving recovery)
* Applies to players, homunculus, mercenaries and elementals
* Note: This also works with custom intervals, but you should make sure they are multiples of 4*NATURAL_HEAL_INTERVAL, otherwise it will round to the closest possible interval (you can reduce the timer interval in map.hpp when needed)
2022-03-30 12:13:42 +02:00
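A minimal sketch of the interval behavior described in the commit above, assuming a per-unit timestamp of the last natural recovery; names are illustrative and this is not rAthena's regen code.

```cpp
#include <cstdint>

struct regen_state {
    int64_t last_recovery_tick = 0; // time of the last natural recovery
};

// Recovery happens only once a full interval has passed since the last one;
// the anchor advances even if the unit was at full HP/SP during the interval.
bool natural_heal_due(regen_state& st, int64_t now_ms, int64_t interval_ms) {
    if (now_ms - st.last_recovery_tick < interval_ms)
        return false;
    st.last_recovery_tick = now_ms;
    return true;
}
```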
Pokye
630ca789c8 Some Items Fixes (#6758) 2022-03-30 10:58:22 +02:00
Atemo
de990f71ad Fixes mobs res/mres always 0 (#6760)
Fixes an issue where status_calc_misc resets the value of res/mres (potentially defined in mob_db.yml) to 0.

Thanks to @limitro !
2022-03-29 22:47:45 +02:00
Aleos
aebf99c32b Fixes Reading Spellbook behavior (#6711)
* Fixes #6705.
* Comet should now give the target Magic Poison instead of Burning.
* Confirmed on kRO that Reading Spellbook persists through relog but not death.
* Converts the Reading Spellbook function to use by reference.
* Removed some left over content from the YAML database conversion.
Thanks to @Relliksuriv!
Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-03-29 10:55:54 -04:00
Aleos
51335efec2 Fixes monster_ai battle config (#6757)
* Fixes #6751.
* Expands the cap of the monster_ai config to support new settings.
Thanks to @xZee0523!
2022-03-29 09:44:04 -04:00
Everade
9e151c736b Renames SQL Upgrade script (#6759)
Corrects sql upgrade file name from upgrade_20221103 to upgrade_20220311
2022-03-29 14:28:55 +02:00
Jittapan Pluemsumran
695de6313a Implemented new bonus for official Sarah Card behavior. Fixes #6189 (#6281) 2022-03-29 15:35:15 +07:00
idk-whoami
d2826827df Added Eff_Deepsleep status effect (#6735) 2022-03-24 22:59:10 +01:00
Atemo
a5eaacb1c3 Fixed malangdo costume NPC Menu (#6738)
Fixed #6721

Thanks to @Surefirer
2022-03-24 18:52:22 +01:00
Atemo
2a8264ae22 Fixed a mistake in quests_rockridge.txt (#6739)
Fixed #6712

Thanks to @mazvi
2022-03-24 18:51:29 +01:00
Aleos
0323aa37c8 Adds INF2_ISTOGGLEABLE skill flag (#6737)
* Adds the INF2_ISTOGGLEABLE skill flag which is used to enable or disable a skill's status. When toggled off the skill doesn't consume HP/SP.
Thanks to @Lemongrass3110!
2022-03-24 12:50:02 -04:00
Aleos
7c275f3130 Updates Quest skills from kRO patch (#6736)
* Fixes #6732.
* kRO Changelog: https://ro.gnjoy.com/news/notice/View.asp?BBSMode=10001&seq=7177&curpage=1
Thanks to @Atemo, @attackjom, and @Lemongrass3110!
2022-03-24 12:31:56 -04:00
Aleos
5a7d2dc2a1 Fixes remaining status calculation flag issues (#6740)
* Follow up to d739e8f and 302168d.
* Resolves the status_calc_bl_main call not properly assigning any sub status calculation effects based on any primary stat changes.
Thanks to @Singe-Horizontal!
2022-03-24 11:54:33 -04:00
idk-whoami
d7ac7e98b9 Corrected script for some item #5 (#6663)
Thanks to @Lemongrass3110
2022-03-23 16:07:19 +01:00
Aleos
ce4aed26bb Fixes Grand Cross self damage (#6716)
* Fixes #6647.
* Grand Cross will no longer display miss on self.
Thanks to @eppc0330 and @secretdataz!
2022-03-23 10:38:43 -04:00
Aleos
302168d1f3 Fixes SCB calculations (#6733)
Follow up to d739e8f.
Resolves issues with status calculations getting improperly checked.
Thanks to @mazvi, @kaninhot004, and @Rayvakarian!
2022-03-23 10:40:35 +01:00
Everade
a534628996 Adds 10 new enchant items (#6728)
Added a handful of new enchant items.
Updated some placeholders with aegis names.

Thanks to @Lemongrass3110
2022-03-21 00:02:30 +01:00
Aleos
242983f0dd Fixes Wug Rider allowing physical attacks (#6714)
* Fixes #6713.
* Physical attacks are now disabled when a Ranger-type class is riding a Wug.
Thanks to @Surefirer!
2022-03-18 14:10:43 -04:00
Aleos
bf3b12bc3d Fixes NoWalkDelay item bonus (#6707)
* Fixes #6636.
* Resolves an issue with NoWalkDelay being checked too late and not properly giving the bonus to the player.
Thanks to @iraciz!
2022-03-17 14:36:50 -04:00
Aleos
a46b54a893 Adjusts bonus3 item bonuses to support ALL types (#6706)
* Fixes #6691.
* Adjusts bonus3 bAddEle, bonus3 bSubEle, bonus3 bSubRace item bonuses to support ELE_ALL/RC_ALL flags.
Thanks to @eppc0330!
2022-03-17 14:04:36 -04:00
HAO YAN
00be4ee9ca Update Item Card slot count (#6301)
Should cover all card slots, since these slots don't only store cards anymore.
2022-03-17 18:20:17 +01:00
Lemongrass3110
5193fd6c92 Fixed some status related issues (#6702)
Fixed stone curse not ending
Fixed SR_GENTLETOUCH_CURE not being able to cure poison, silence and blind
Cleaned up SR_GENTLETOUCH_CURE formula
Fixed casting SR_GENTLETOUCH_CURE on self during status change
Added missing NoMove state to PF_SPIDERWEB
Fixed GN_HELLS_PLANT being removed on mapchange

Fixes #6701

Thanks to @mazvi
2022-03-17 15:05:35 +01:00
Lemongrass3110
79d978e010 Cleanup for some Soul Reaper skills (#6700) 2022-03-16 21:54:12 +01:00
Lemongrass3110
0449f39015 Fixed SL_HIGH (#6699)
Added a central random function for chance calculation
Added a missing status change end for SC_SPIRIT on jobchange
2022-03-16 21:28:38 +01:00
Aleos
d739e8f44e Converts the SCB flags to bitset (#6661) 2022-03-16 15:59:23 -04:00
Lemongrass3110
e3262d0be1 Fixed mado gear display (#6698)
Fixes #6695

Thanks to @Rushtic
2022-03-16 20:34:25 +01:00
Lemongrass3110
a62859fdb0 Fixed soul linking (#6697)
Fixes #6696

Thanks to @mazvi and @aleos89
2022-03-16 16:46:48 +01:00
Everade
414023a14a Implemented a few Reform items. (#6684)
Implements new Reform UI related items.
Other minor item corrections.
2022-03-16 00:59:45 +01:00
Aleos
6787f68452 Expands the SC start check (#6690) 2022-03-15 17:15:01 -04:00
Singe Horizontal
4ef2f33284 Fixes Can't attack when casting is disabled (#6687)
* Fixes #6686.
* Expands upon the status checks to allow physical attacks when casting is disabled.
2022-03-15 14:22:02 -04:00
Singe Horizontal
25212bf8b9 Fixes Mounted characters immune to freeze/stone (#6689)
* Fixes #6688.
* Adds a missing break.
2022-03-15 10:34:30 -04:00
Lemongrass3110
213406013d Fixed EFST initialization (#6683)
Fixes #6675

Thanks to @jofvgaming, @Singe-Horizontal, @secretdataz and @aleos89.
2022-03-14 23:15:34 +01:00
Toshiro90
46b2af3843 kRO patch 2022-02-16 items (#6623)
Adds new items and updated some existing ones, based on kRO patches (main & zero) from 2022-02-16.

Thanks to @Lemongrass3110 

Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
Co-authored-by: Everade <Everade@users.noreply.github.com>
2022-03-14 22:18:33 +01:00
Aleos
a3d85d5a64 Adds some missing statuses to skills (#6682)
* Fixes #6664 and fixes #6676.
* Adds some missing skill to status associations.
Thanks to @surefirer, @ryomahostingph, and @Lemongrass3110!
2022-03-14 16:13:58 -04:00
Toshiro90
95af807462 kRO patch 2022-02-03/04 items (#6592)
Adds new items and updated some existing ones, based on kRO patches from 2022-02-03/04.
Includes 3 new items from kRO Zero patch.

Thanks to @Atemo @Takerio 

Co-authored-by: Everade <Everade@users.noreply.github.com>
Co-authored-by: Atemo <Atemo@users.noreply.github.com>
2022-03-14 21:01:53 +01:00
Lemongrass3110
11f3d4b100 Added a new flag SCF_NOFORCEDEND (#6679)
The old hardcoded lists were not in sync and this was a specific list of status changes that could not even be forcibly ended with sc_end. Therefore a new flag is needed. Additionally improved error reporting.

Fixes #6666

Thanks to @eppc0330
2022-03-14 20:52:31 +01:00
Lemongrass3110
0663e7b43d Fixed atcommand jail (#6678)
Fixes #6677

Thanks to @gen1x8
2022-03-14 20:35:45 +01:00
Singe Horizontal
a032cd1f74 Spiral pierce immune fix (#6668)
Fixes the inverted target-type check for Spiral Pierce's stop effect; i.e. it should stop normal monsters, not status-immune ones

Fixes #6667
2022-03-14 12:26:50 +01:00
Singe Horizontal
cf519c0866 sc_end SC_ALL fix (#6674)
Fixes #6673
2022-03-14 10:04:53 +01:00
Singe Horizontal
a86832fe6b Fixes safety wall status (#6672)
* Fixes #6670.
* Adds the missing Safety Wall statuses to the applicable skills.
Thanks to @Singe-Horizontal!
2022-03-13 20:45:42 -04:00
Atemo
73a8d1365e open_quest_ui script command (#6662)
* script command to force open the quest UI for the attached player.

Thanks to @Lemongrass3110 !
Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-03-13 14:56:03 +01:00
idk-whoami
3be73773f4 Implementations + corrections of a bunch of items (#6652) 2022-03-11 23:05:25 +01:00
Atemo
a6a3c165b2 randomoptgroup script command (#6655)
* Script command to get a randomly rolled random option ID, value and param from a random option group ID

Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>

Thanks to @aleos89 !
2022-03-11 19:04:59 +01:00
Atemo
2012857815 Updated rockridge npcs to official (#6503)
* Included the sql update

Thanks to @Pokye
2022-03-11 18:55:00 +01:00
Aleos
365d3d7a80 Fixes Slim Potion Pitcher item lookup (#6660)
* Fixes #6646.
* Fixes the item requirement check for Slim Potion Pitcher being offset by one.
Thanks to @Indigo000!
2022-03-11 12:35:05 -05:00
Lemongrass3110
d79db934cb Added some more checks to Laphine UIs (#6654) 2022-03-11 17:31:49 +01:00
Aleos
0f4f612878 Fixes a crash with script command sc_end (#6659)
* Fixes #6657.
* Adds an extra sanity check.
* Moves the database check post the sanity checks for faster processing.
Thanks to @kaninhot004!
2022-03-11 11:19:42 -05:00
Lemongrass3110
0ca5f450b7 Added a Github Action for CMake (#6542) 2022-03-10 22:43:38 +01:00
Cydh Ramdh
f28d207274 Implementation of Status Changes in YAML (#1685)
* Removes the initialization of Status Changes via hard-code and puts it into YAML form.
* From this database it is much easier to delegate icons, calc flags, opt flags, miscellaneous flags, and several other things for all status changes.
* Statuses can quickly be reloaded via atcommand reloadstatus.
Thanks to @cydh, @Atemo, @Lemongrass3110, and the others who helped!
Signed-off-by: Cydh Ramdh <cydh@pservero.com>
Co-authored-by: atemo <capucrath@gmail.com>
Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-03-10 13:11:59 -05:00
Lemongrass3110
e74bbfd37e Added missing export for bWeaponSubSize (#6651)
Fixes #6650

Authored-by: Cydh <cydh.ramdh@gmail.com>
2022-03-10 15:46:49 +01:00
Everade
92c3e19621 Illusion of Moonlight (#6637)
Fixes #6453 
Implemented Illusion of Moonlight
Implemented related monsters and enchanter

Thanks to @crazy-arashi @Atemo @Lemongrass3110  @secretdataz 
Co-authored-by: Atemo <capucrath@gmail.com>
Credits to @crazy-arashi for the initial script release
2022-03-08 00:29:17 +01:00
Lemongrass3110
e2c15afd9d Fixed barter packetversion checks 2022-03-07 22:55:42 +01:00
JohnnyPlayy
50d7c4052f Implementing Script of the Ammo Vendors Extension (#6550)
Thanks to @Toshiro90 and @Lemongrass3110 for their help.

Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-03-07 18:19:22 +01:00
Lemongrass3110
77e56cf838 Fixed CG_HERMODE (#6119)
Fixes #6114

Thanks to @Everade
2022-03-07 16:32:59 +01:00
idk-whoami
249c69921c Corrected script for some item #3 (#6614)
Thanks to @attackjom
2022-03-07 15:41:27 +01:00
Everade
27c0586d79 Mapflag Cleanup
For the sake of readability and consistency.
Moved some mapflags to their appropriate files.
No functional changes.
2022-03-07 03:33:32 +01:00
Atemo
df65d5ddfc Implemented Ghost Palace exchange NPC (#4003)
Fixed #3936

Thanks to @aleos89 @Angelic234 and @zackdreaver !
2022-03-02 15:25:29 +01:00
mazvi
6d0eef0a20 Adjusted Homun S Quest to support Biolo (#6634)
Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-02-27 15:29:05 +01:00
idk-whoami
67d1261659 Added RC2_Malangdo race groups (#6633) 2022-02-24 01:20:01 +01:00
Lemongrass3110
08192a35bc Revamped Laphine UIs (#6625)
Fixes #3302
Closes #4348

Thanks for the initial release by @Cydh in #4348 and everyone that contributed to it.

All existing data was migrated and cleaned up where necessary.
Thanks to @Everade for his help here.

Laphine UIs are now fully yamlified and not dependent on the script engine.
They make use of new item group features and of the already existing random option group feature.
This way they will be far easier to maintain, even though they are a little less customizable.

Thanks to @limitro, @CairoLee, @dimasshotta and everyone else who contributed!

Co-authored-by: Cydh <cydh.ramdh@gmail.com>
Co-authored-by: Everade <Everade@users.noreply.github.com>
Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-02-22 21:52:27 +01:00
Lemongrass3110
5227167716 Removed leftovers from unreleased classes (#6597) 2022-02-21 16:50:23 +01:00
mazvi
883518bf08 Dummy Summoner Star Emperor Soul Reaper Level 175+ (#6600)
Dummy data for Summoner, Star Emperor, Soul Reaper Level 176-200
Will be removed when the actual data is obtained from kRO

Get from PandasWS/Pandas@9ec9a9a
2022-02-21 16:17:31 +01:00
Cydh Ramdh
5f3c0ce40b Removed duplicate entry on re/job_exp.yml (#6629) 2022-02-21 16:00:46 +01:00
Cydh Ramdh
7eb125d5f1 Corrected property name on skill_db.yml header doc (#6626) 2022-02-19 02:38:58 +07:00
Aleos
47e515888c Changes mapflag storage type to vector (#6624)
* Changes the mapflag storage type from an unordered_map to vector giving it a performance boost.
* This changes all lookups and modifications to O(1) rather than a possible O(log n).
2022-02-18 08:10:57 -05:00
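The gist of that storage change, as a hedged C++ sketch (the enum values, struct and method names here are illustrative, not rAthena's actual ones): one slot per mapflag in a fixed-size vector, so reading or writing a flag is a direct index instead of a map lookup.

```c++
#include <cstdint>
#include <vector>

// Illustrative flag enum; rAthena's real e_mapflag has many more entries.
enum e_mapflag : int16_t { MF_PVP, MF_GVG, MF_NOPETCAPTURE, MF_MAX };

struct map_data_sketch {
	// One entry per flag: getting or setting a flag is a plain O(1) index.
	std::vector<int> flags = std::vector<int>( MF_MAX, 0 );

	int getFlag( e_mapflag flag ) const { return flags[flag]; }
	void setFlag( e_mapflag flag, int value ) { flags[flag] = value; }
};
```

The trade-off is a little memory per map for unset flags in exchange for constant-time access on every lookup.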
Joam
fc7ed9d397 Update Biolo & Meister summon packet (#6505)
Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-02-18 00:26:27 +01:00
Joam
44188e39d4 Add getitempos script (#6621)
Item script command that returns the equip position.
2022-02-17 22:46:56 +01:00
merry
31c7e4b157 elemental: Add std::hash specialization for e_elemental_skillmode (#6619)
Required for e_elemental_skillmode to be used as a key in std::unordered_map.
See also #6591.
2022-02-17 03:23:36 +07:00
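The commit above adds a std::hash specialization so the enum can be used as an unordered_map key; a minimal sketch of what such a specialization looks like (the enumerators and hash body here are illustrative, not the exact rAthena code):

```c++
#include <cstddef>
#include <functional>
#include <unordered_map>

// Illustrative enumerators; the real e_elemental_skillmode lives in rAthena's elemental code.
enum class e_elemental_skillmode : int { PASSIVE, ASSIST, AGGRESSIVE };

namespace std {
	template <> struct hash<e_elemental_skillmode> {
		size_t operator()( const e_elemental_skillmode& mode ) const {
			// Hash the underlying integer value of the enum.
			return hash<int>()( static_cast<int>( mode ) );
		}
	};
}

// With the specialization in place, the enum works as an unordered_map key:
std::unordered_map<e_elemental_skillmode, int> skills_per_mode;
```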
Aleos
84248255b7 Implements MF_NOPETCAPTURE (#6618)
* Fixes #6556.
* Implements the nopetcapture mapflag which is used to stop pet-type monsters from being caught.
Thanks to @RagnaWay and @Lemongrass3110!
2022-02-16 12:51:25 -05:00
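Conceptually the new mapflag is a single early-out in the capture path; a hedged sketch of the rule (function and parameter names are illustrative, not the actual rAthena code):

```c++
// Illustrative sketch of the capture rule described above.
bool pet_capture_allowed( bool target_is_pet_type, bool map_has_nopetcapture ) {
	if( !target_is_pet_type )
		return false;  // only pet-type monsters can be tamed at all
	if( map_has_nopetcapture )
		return false;  // the nopetcapture mapflag forbids taming on this map
	return true;       // otherwise the normal capture chance is rolled
}
```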
Cydh Ramdh
c454443a0b Added @addfame (#6588)
* Added `@addfame`
* Added alias `famepoint`, `famepoints`
* Changed `sd` in `pc_addfame` to a reference

Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-02-16 01:00:59 +07:00
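The last bullet is a small but common C++ cleanup: passing the player session by reference instead of by pointer documents that the argument can never be null and drops the defensive check. A hedged sketch (struct and field names are illustrative):

```c++
#include <cstdint>

struct map_session_data_sketch { int32_t fame = 0; };

// Before: pc_addfame( map_session_data* sd, ... ) had to guard against sd == nullptr.
// After:  a reference makes the "valid player required" contract explicit.
void pc_addfame_sketch( map_session_data_sketch& sd, int32_t count ) {
	sd.fame += count;  // the real function also persists the value and notifies the client
}
```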
Aleos
a8e516730a Fixes Tuna Belly and Shrimp Party behaviors (#6616)
* Fixes #6611.
* Resolves an issue with Tuna Belly not sending the visual update to healed players.
* Resolves Shrimp Party SP recovery not increasing the heal rate.
Thanks to @Balferian!
2022-02-15 09:49:33 -05:00
Atemo
c092c8711b Changes the default value of the conf item_enabled_npc to no (#6574)
* Split pre-re/re setting
** Changing of equipment while interacting with NPCs is allowed on pre-renewal by default
** Changing of equipment while interacting with NPCs is not allowed on renewal by default
* Added a message when the player tries to change equipment while talking to an NPC

Fixed #6566

Thanks to @aleos89, @Lemongrass3110 and @Balferian !
2022-02-14 16:04:47 +01:00
Atemo
9ed2a90900 Added missing mapflags for recent maps (#6612) 2022-02-14 15:15:15 +01:00
Sader Fawall
ac7292c92d Allow creating instance without timer or data (#5112)
* Allow creating instance with no timer
* Added NoNpc to prevent copying NPCs from the source map
* Added NoMapFlag to prevent copying Mapflags from the source map
* Added instance_list script command to retrieve the instance IDs for the given map name/instance mode

Co-authored-by: Aleos <aleos89@users.noreply.github.com>
Co-authored-by: Atemo <atemo@users.noreply.github.com>
Co-authored-by: Lemongrass3110 <3517879+Lemongrass3110@users.noreply.github.com>
2022-02-11 18:45:17 +01:00
Everade
c429747f73 Fixes monster MD_SWORD_FISH modes
Adds another synthesis-relevant item
2022-02-09 15:05:53 +01:00
Aleos
4b1446bd09 Fixes cooking value not updating properly (#6605)
* Fixes #6602.
* Resolves cooking items only updating the player's cooking variable and not properly updating the player's session data along with it.
Thanks to @Shaktohh and @Lemongrass3110!
2022-02-08 15:04:53 -05:00
Everade
645ff8591a Implemented Synthesis and Upgrade UI items (#6599) 2022-02-08 00:27:11 +01:00
Danilo Lemes
51706f2ba1 Wrap function body to match struct directives (#6604)
Fixes #6603
2022-02-08 00:22:04 +01:00
Kanin Temsrisuk
a1b7fab19b Fix wrong command to get armor level. (#6593) 2022-02-06 20:37:46 +01:00
Danilo Lemes
70220e16f0 Improve docker-compose tool (#6344) 2022-02-06 14:49:58 +07:00
Lemongrass3110
6ccf15330e Added support for unlimited supplies in market shops (#6571)
Fixes #6569

Thanks to @JohnnyPlayy, @Balferian and @Toshiro90

Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-02-04 16:17:30 +01:00
Atemo
3da8426fe7 Removed extra space/tab and sort the items id in item_db (#6590) 2022-02-03 19:39:35 +01:00
Aleos
e7f58b1221 Removes an extra sanity check for cardfix bonuses (#6582)
* Fixes #6579.
* Partial revert to ad2660b.
* Cardfix bonuses can go above 1000.
Thanks to @kaninhot004!
2022-02-02 21:04:53 -05:00
idk-whoami
61d9cb1360 Corrected script for some item #2 (#6565)
Implementations + corrections of a bunch of items

Thanks to @Atemo
2022-02-02 19:58:11 +01:00
Everade
69de8663a7 Infinite Space Instance (#6554)
Implemented Infinite Space instance (Episode 16.1 version)
Implemented related shops, enchanter and misc NPCs.
Implemented items and monsters from the 16.1 update.

Thanks to @crazy-arashi, @Atemo, @Lemongrass3110, @hannicaldummy 
Credits to @crazy-arashi for the initial script release
2022-02-02 17:28:18 +01:00
Chris
65af393097 Episode 19 items (kRO 2022-01-19 patch) (#6563)
* Adds Episode 19 items (patch 2022-01-19)
* Adds some older missing Enchant Stone Boxes obtainable from episode 19 boxes.

Thanks to @Everade

Co-authored-by: Atemo <Atemo@users.noreply.github.com>
Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-02-02 16:15:29 +01:00
Everade
2c48725761 Changes the default value of itemDB's "Override" to 100 (#6577)
Changes the Default of both "Override" settings ("NoUse" and "Trade" restrictions) from 0 to 100 and removes all "Override" settings currently set.
Minor formatting and doc fixes.
Fixes CD_In_MouthK (420003) headgear Location.

Thanks to @aleos89
2022-02-02 01:05:29 +01:00
Aleos
3deb3e2048 Updates Golden Thief Bug card behavior (#6562)
* Fixes #5918.
* Target magic skills should get blocked even when cast on self.
* Self magic skills should get blocked on all targets except self.
* Adds an IgnoreGtb skill flag to explicitly allow a skill to bypass these checks.
Thanks to @Playtester!
2022-02-01 15:53:27 -05:00
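The rule set above can be summarised as one small decision function; a hedged sketch (names are illustrative, not the actual battle-code check):

```c++
// Illustrative sketch of the Golden Thief Bug blocking rule described above.
bool gtb_blocks_skill( bool target_has_gtb_card, bool skill_ignores_gtb,
                       bool is_self_skill, bool caster_is_target ) {
	if( !target_has_gtb_card || skill_ignores_gtb )
		return false;             // nothing to block, or the skill has the IgnoreGtb flag
	if( is_self_skill )
		return !caster_is_target; // self magic only lands on the caster
	return true;                  // target magic is blocked, even when cast on self
}
```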
Aleos
181bc0e856 Updates to a few Sorcerer skills (#6561)
* Fixes #6387 and fixes #6425.
* Updates the damage formulas for Earth Grave, Diamond Dust, and Varetyr Spear.
* Removes the NoOverlap flag for Diamond Dust and Comet.
Thanks to @Rayvakarian, @Everade, @Balferian, @kuranwinterfall, @Badarosk0, @attackjom, and @Lemongrass3110!
2022-02-01 14:41:04 -05:00
Atemo
80307eb86e Correction of a bug with mob avail (#6578)
Introduced in a725844553
Fixed #6576

Thanks to @Vandersexxx !
2022-02-01 20:36:49 +01:00
Aleos
ad2660b037 Resolves cardfix calculation going below zero (#6560)
* Fixes an issue where cardfix bonuses caused the final result to go below zero, ultimately giving unwanted effects.
Thanks to @Tokeiburu and @alycecil!
2022-02-01 14:01:58 -05:00
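The fix boils down to clamping the accumulated rate at its lower bound only; a hedged sketch, assuming the usual rAthena convention of a cardfix rate scaled around a base of 1000 (names are illustrative):

```c++
#include <algorithm>
#include <cstdint>

// Illustrative sketch: stacking negative card bonuses must not drive the rate
// below zero, but there is deliberately no upper cap (see #6582, above),
// so rates above 1000 (i.e. above 100%) pass through unchanged.
int64_t apply_cardfix_sketch( int64_t damage, int64_t cardfix_rate ) {
	cardfix_rate = std::max<int64_t>( cardfix_rate, 0 );
	return damage * cardfix_rate / 1000;
}
```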
Kakaroto
8ebba124eb Thanatos Tower Minor Fix (#4370)
Minor fix to allow quest progression with the Summoner job.

Fixes #4369
2022-02-01 19:56:13 +01:00
Atemo
9df2ebf6d9 Added BaseASPD default values when reading JobDatabase (#6575) 2022-01-31 23:08:48 +01:00
idk-whoami
178e71d2f9 Added Eff_Heat status effect (#6572)
Thanks to @idk-whoami!
2022-01-28 12:31:11 -05:00
Lemongrass3110
0fc05cec70 Follow up to 6ec51e8
Thanks to @mazvi
2022-01-24 09:07:32 +01:00
Lemongrass3110
6ec51e8bf3 Added feature configs for barter shops (#6549)
Thanks to JohnnyPlayy
2022-01-24 09:03:03 +01:00
Vincent Stumpf
77bd98519d Fix packet size calculation for normal barter shops (#6548)
Fixes #6545
2022-01-24 08:54:23 +01:00
Lemongrass3110
b7e008217f Episode 19 constants 2022-01-23 20:51:46 +01:00
Atemo
d1c5dffa11 Removed duplicate combos (#6546) 2022-01-23 20:06:29 +01:00
Everade
8064766f18 Quest Database - TimeLimit cleanup (#6539)
Corrected time limits according to OngoingQuestInfoList.lub
Added many quests which rely on TimeLimit

Fixes #6501

Credits to @llchrisll for most of the translations
Thanks to @Atemo
2022-01-23 01:14:24 +01:00
idk-whoami
464fd81dd6 Corrected script for some item #1 (#6523)
Implementations + corrections of a bunch of items
2022-01-21 14:44:22 +01:00
Jittapan Pluemsumran
53a367785d Added new mapflags for disabling renewal exp & drop penalty (#6541) 2022-01-21 20:28:42 +07:00
Lemongrass3110
e40da669ed Initial release of barter shops (#6508)
Fixes #5062

Thanks to @Atemo and @aleos89

Co-authored-by: Atemo <Atemo@users.noreply.github.com>
Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-01-21 00:59:16 +01:00
Everade
4ad35d82bd Added Frost_Crystal consumable effect (#6540)
Also corrected price for Frost_Crystal and Squad_Prize

Fixes #6536

Thanks to @Atemo
2022-01-20 17:15:53 +01:00
Lemongrass3110
9ccdfe9d17 Fixed a display problem in refine UI (#6535)
Fixes #6528

Thanks to @idk-whoami, @zdurexz @secretdataz
2022-01-20 14:51:23 +01:00
JohnnyPlayy
9d324ba077 Implementing Stylist UI Script (#6506)
* Moved the old stylist NPCs to the pre-renewal folder
* Implemented and updated the stylist NPCs which use the new UI on renewal

Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
Co-authored-by: Everade <Everade@users.noreply.github.com>
2022-01-20 01:07:20 +01:00
Everade
08b2dfa417 Episode 14.3 - Flame Basin Daily Quest Reward (#6534)
Fixes #6533
2022-01-19 20:06:42 +01:00
Lemongrass3110
fe8e2e477d Removed travis CI (#6532) 2022-01-19 00:26:03 +01:00
Lemongrass3110
5e48bad6a9 Removed appveyor CI (#6531) 2022-01-19 00:23:22 +01:00
Lemongrass3110
534c5b2edd Add more Github Actions (#6530)
Renames "build_servers.yml" to "build_servers_gcc.yml"
Removed Pre-Renewal and Renewal from GCC compilation
Added an action for Pre-Renewal and Renewal
Added an action for VIP
Added an action for different packet versions
Added MSVS build
Changed some make server to make all
Added master building
Disabled LTO by default
Added some missing override declarations
Added Clang building
Disabled Clang 12 and 13 for the time being

Thanks to @aleos89 and @Akkarinage for their help and input.
2022-01-19 00:08:28 +01:00
Akkarinage
56a05bd458 Github Action for NPC and DB Validation (#6520)
Co-authored-by: Aleos <aleos89@users.noreply.github.com>
Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-01-18 01:18:00 +01:00
Kanin Temsrisuk
8477be724f Update item_db_etc.yml (#6527)
Fixed 1 bonus script to use a constant name instead of a numeric constant
2022-01-17 16:55:58 +01:00
Everade
1b16a3074d Implemented missing beat shop items (#6526)
Fixes #6525

Thanks to @RagnaWay
2022-01-17 15:21:48 +01:00
Akkarinage
6a25a392c2 Github Build Actions for Pull Requests (#6512)
Github Action will automatically build pull requests on:

OS: Ubuntu 20.04 LTS
GCC: Versions 7, 8, 9 and 10
Both Renewal and Pre-Renewal

Co-authored-by: Aleos <aleos89@users.noreply.github.com>
Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
2022-01-16 13:07:04 +01:00
mazvi
539b822c6d Fix small typo on Jobmaster (#6522) 2022-01-16 12:41:35 +01:00
Atemo
c08c7c027d Implemented new items up to january 5, 2022 (#6502) 2022-01-14 18:41:13 +01:00
Atemo
79bee276ee Corrected callshop arguments in eden_market.txt (#6516)
* The flags for shops other than normal shops are ignored, as explained in the docs

Fixed #6426
Thanks to @mazvi
2022-01-14 15:27:59 +01:00
Atemo
0bc85ae0f9 Updated pc_cant_act2 (#6513)
Fixed #5087

Thanks to @idk-whoami
2022-01-14 15:15:09 +01:00
Atemo
8b53e58d4e Script command healap (#6514)
Fixes #6492
2022-01-14 09:37:35 +00:00
Lemongrass3110
0aa5e93397 Fixed vending packets for 2020 and 2021 (#6518)
Fixes #6510

Thanks to @kaninhot004, @sonniez, @Anjuts, @RagnaWay and @pigati23
2022-01-14 04:06:45 +00:00
Lemongrass3110
71ebd93027 Fixed viewing player equipment (#6517)
Fixes #6509

Thanks to @kaninhot004, @sonniez and @Anjuts
2022-01-14 02:49:07 +00:00
Daegaladh
be4e648fad Fixed players without can_trade permission being able to sell items to NPC (#6511) 2022-01-13 15:51:23 +01:00
hannicaldummy
0d8118ceda Addition of Episode 15.2 : Infinite Space monsters (#6491)
Related to #6448

All major mob information has been taken from Divine-Pride, and basic stats from the original mob versions.

Aggressive mode was added, as can be seen in https://www.youtube.com/watch?v=d4RDE4ty-VM

Co-authored-by: Everade <Everade@users.noreply.github.com>
Co-authored-by: Atemo <Atemo@users.noreply.github.com>
2022-01-12 20:25:30 +01:00
idk-whoami
abcdf3d17d Added Eff_Freezing status effect (#6493) 2022-01-12 16:35:27 +01:00
Lemongrass3110
bf3bd7fb03 Synced packs_struct.hpp with Hercules (#6495)
Credits to the respective authors
2022-01-12 14:07:02 +01:00
Lemongrass3110
4e26c1bc55 Added some missing shield view ids (#6498)
Fixes #6497

Thanks to @Relliksuriv and @laziem

Co-authored-by: Atemo <capucrath@gmail.com>
2022-01-12 12:57:36 +01:00
Lemongrass3110
eb75e6fd6f Added a check for MAX_SKILL being too small (#6504)
Fixes #6494

Thanks to @voyfmyuh and @CairoLee

Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-01-12 11:56:45 +01:00
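The added guard is essentially a bounds check with a clear error message when the compiled-in skill array is too small for the loaded database; a hedged sketch (the constant's value and the function name are illustrative):

```c++
#include <cstdio>

#ifndef MAX_SKILL
#define MAX_SKILL 1454  // illustrative value; the real constant is configured in rAthena's headers
#endif

// Illustrative sketch: reject a skill database index that the compiled array cannot hold.
bool skill_index_fits( int index ) {
	if( index < 0 || index >= MAX_SKILL ) {
		std::fprintf( stderr,
			"Skill index %d does not fit into MAX_SKILL (%d); increase MAX_SKILL and recompile.\n",
			index, MAX_SKILL );
		return false;
	}
	return true;
}
```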
Lemongrass3110
916860fef8 Implemented inventory expansion (#6485)
Fixes #5218
2022-01-12 09:12:47 +01:00
Atemo
c8d1d1ea1e Converted item_combo_db txt to yml (#6162)
* Converts item_combo_db.txt into YAML.
* Includes CSV2YAML converter.

Thanks to @llchrisll and @Everade 

Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-01-11 20:33:27 +00:00
idk-whoami
0fa79030e7 Corrected script for some item (#6500) 2022-01-11 16:17:07 +01:00
Lemongrass3110
655fbbd590 Added 4th class related random options (#6499) 2022-01-11 14:49:25 +01:00
Lemongrass3110
6cf761cabc Fixed a compile error in clif_parse_stylist_buy
Fixes #6496

Thanks to @sader1992
2022-01-11 10:36:51 +01:00
Vincent Stumpf
dcd01a0fa5 Fix esma with no buffs (#6490) 2022-01-10 14:54:32 +01:00
Daegaladh
f46dccff85 Fixed Giant Fly Wing behavior (#6204)
Adds new RandomAll warpparty mode and fixes Giant Fly Wing behavior

Fixes #6160

Co-authored-by: Lemongrass3110 <lemongrass@kstp.at>
Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-01-10 11:19:55 +01:00
Lemongrass3110
c22ef3f547 Initial release of stylist UI (#6446)
Fixes #3037

Thanks to @Balferian and @aleos89

Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-01-09 21:56:58 +01:00
Lemongrass3110
4bc9e24b4e Initial support for 2021-11-03RagexeRE (#6431)
Fixes #6415
2022-01-09 14:03:17 +01:00
idk-whoami
a251c3743c Corrected script for some item (#6440)
Thanks to @RagnaWay and @laziem
2022-01-08 15:55:50 +01:00
The Cecil
6b7144157e Fixed incorrect View casing on Wing_Headphone (5963) (#6443)
Fixed #6442
2022-01-06 20:43:46 +01:00
Lemongrass3110
09dcb2c61f Fixed pet capture rate (#6439)
Added some missing checks for the distance to the target monster and for whether the target monster is hiding.

Fixes #6395

Thanks to @Everade and @mrjnumber1

Co-authored-by: Aleos <aleos89@users.noreply.github.com>
2022-01-04 22:52:22 +01:00
Lemongrass3110
9a27cb6a51 Fixed pc_checkskill in pre-renewal (#6438)
Fixes #6433

Thanks to @Shaktohh and @aleos89
2022-01-04 13:23:27 +01:00
Lemongrass3110
ec7879c99a Disallow changing to 4th on pre-renewal (#6435)
Fixes #6434

Thanks to @Shaktohh
2022-01-02 22:15:40 +01:00
659 changed files with 433259 additions and 66281 deletions


@@ -0,0 +1,49 @@
name: Build servers with Clang
# build_servers_clang.yml
on:
push:
branches:
- master
pull_request:
paths:
# Always trigger all Github Actions if an action or something CI related was changed
- '.github/workflows/**'
- 'tools/ci/**'
# This workflow should run when a file in a source directory has been modified.
- 'src/**'
- '3rdparty/**'
jobs:
build:
# Github Actions checks for '[ci skip]', '[skip ci]', '[no ci]', '[skip actions]', or '[actions skip]' but not a hyphenated version.
# It's a catch-all in case a Pull Request has been opened and someone is on auto-pilot.
if: "!contains(github.event.head_commit.message, 'ci-skip')"
runs-on: ${{ matrix.os }}
strategy:
matrix:
# The ubuntu-latest label currently points to ubuntu-18.04.
# Available: ubuntu-20.04, ubuntu-18.04
os: [ubuntu-18.04]
# Version list can be found on https://github.com/marketplace/actions/install-clang
clang: ['3.9', '4.0', '5.0', '6.0', '7', '8', '9', '10', '11'] #, '12', '13']
steps:
- uses: actions/checkout@v2
- name: Set up Clang
uses: egor-tensin/setup-clang@v1
with:
version: ${{ matrix.clang }}
platform: x64
- name: Command - configure
env:
CONFIGURE_FLAGS: 'CC=clang-${{ matrix.clang }} CXX=clang++-${{ matrix.clang }} --enable-buildbot=yes'
run: ./configure $CONFIGURE_FLAGS
- name: Command - make clean
run: make clean
- name: Command - make all
run: make all


@@ -0,0 +1,46 @@
name: Build servers with CMake
# build_servers_cmake.yml
on:
push:
branches:
- master
pull_request:
paths:
# Always trigger all Github Actions if an action or something CI related was changed
- '.github/workflows/**'
- 'tools/ci/**'
# This workflow should run when a file in a source directory has been modified.
- 'src/**'
- '3rdparty/**'
jobs:
build:
# Github Actions checks for '[ci skip]', '[skip ci]', '[no ci]', '[skip actions]', or '[actions skip]' but not a hyphenated version.
# It's a catch-all in case a Pull Request has been opened and someone is on auto-pilot.
if: "!contains(github.event.head_commit.message, 'ci-skip')"
runs-on: ${{ matrix.os }}
strategy:
matrix:
# The ubuntu-latest label currently points to ubuntu-18.04.
# Available: ubuntu-20.04, ubuntu-18.04
os: [ubuntu-latest]
steps:
- uses: actions/checkout@v2
# Install latest CMake.
- uses: lukka/get-cmake@latest
- name: Create build directory
run: mkdir cbuild
- name: Create Unix Makefiles
run: |
cd cbuild
cmake -G "Unix Makefiles" ..
- name: Command - make
run: |
cd cbuild
make

.github/workflows/build_servers_gcc.yml vendored Normal file

@@ -0,0 +1,50 @@
name: Build servers with GCC
# build_servers_gcc.yml
on:
push:
branches:
- master
pull_request:
paths:
# Always trigger all Github Actions if an action or something CI related was changed
- '.github/workflows/**'
- 'tools/ci/**'
# This workflow should run when a file in a source directory has been modified.
- 'src/**'
- '3rdparty/**'
jobs:
build:
# Github Actions checks for '[ci skip]', '[skip ci]', '[no ci]', '[skip actions]', or '[actions skip]' but not a hyphenated version.
# It's a catch-all in case a Pull Request has been opened and someone is on auto-pilot.
if: "!contains(github.event.head_commit.message, 'ci-skip')"
runs-on: ${{ matrix.os }}
strategy:
matrix:
# The ubuntu-latest label currently points to ubuntu-18.04.
# Available: ubuntu-20.04, ubuntu-18.04
os: [ubuntu-latest]
# Older versions of GCC are not available via unaltered aptitude repo lists.
gcc: ['7', '8', '9', '10']
steps:
- uses: actions/checkout@v2
- name: Update & Install packages
# Ubuntu runners already have most of the packages rAthena requires to build.
# https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md
run: |
sudo apt update
sudo apt install zlib1g-dev libpcre3-dev gcc-${{ matrix.gcc }} g++-${{ matrix.gcc }}
- name: Command - configure
env:
CONFIGURE_FLAGS: 'CC=gcc-${{ matrix.gcc }} CXX=g++-${{ matrix.gcc }} --enable-buildbot=yes'
run: ./configure $CONFIGURE_FLAGS
- name: Command - make clean
run: make clean
- name: Command - make all
run: make all


@@ -0,0 +1,77 @@
name: Build servers in Pre-Renewal and Renewal
# build_servers_modes.yml
on:
push:
branches:
- master
pull_request:
paths:
# Always trigger all Github Actions if an action or something CI related was changed
- '.github/workflows/**'
- 'tools/ci/**'
# This workflow should run when a file in a source directory has been modified.
- 'src/**'
- '3rdparty/**'
jobs:
build:
# Github Actions checks for '[ci skip]', '[skip ci]', '[no ci]', '[skip actions]', or '[actions skip]' but not a hyphenated version.
# It's a catch-all in case a Pull Request has been opened and someone is on auto-pilot.
if: "!contains(github.event.head_commit.message, 'ci-skip')"
runs-on: ${{ matrix.os }}
strategy:
matrix:
# The ubuntu-latest label currently points to ubuntu-18.04.
# Available: ubuntu-20.04, ubuntu-18.04
os: [ubuntu-latest]
# Older versions of GCC are not available via unaltered aptitude repo lists.
gcc: ['10']
# We run build checks for both Renewal and PRE-Renewal
mode: ['PRE','RE']
steps:
- uses: actions/checkout@v2
# A simple 'yes' and 'no' can be confusing, so we use names to display in the current job then convert them for use in the compiler.
- name: Variable Parsing - PRE
if: ${{ matrix.mode == 'PRE' }}
run: |
echo "PRERE=yes" >> $GITHUB_ENV
- name: Variable Parsing - RE
if: ${{ matrix.mode == 'RE' }}
run: |
echo "PRERE=no" >> $GITHUB_ENV
- name: Update & Install packages
# Ubuntu runners already have most of the packages rAthena requires to build.
# https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md
run: |
sudo apt update
sudo apt install zlib1g-dev libpcre3-dev gcc-${{ matrix.gcc }} g++-${{ matrix.gcc }}
- name: Start MySQL
run: sudo systemctl start mysql.service
- name: Setup Database and import table data
run: ./tools/ci/sql.sh
- name: Command - configure
env:
CONFIGURE_FLAGS: 'CC=gcc-${{ matrix.gcc }} CXX=g++-${{ matrix.gcc }} --enable-prere=${{ env.PRERE }} --enable-buildbot=yes'
run: ./configure $CONFIGURE_FLAGS
- name: Command - make clean
run: make clean
- name: Command - make server
run: make server
- name: Run Once - login-server
run: ./login-server --run-once
- name: Run Once - char-server
run: ./char-server --run-once
- name: Run Once - map-server
run: ./map-server --run-once


@@ -0,0 +1,43 @@
name: Build servers with MSVS
# build_servers_msbuild.yml
on:
push:
branches:
- master
pull_request:
paths:
# Always trigger all Github Actions if an action or something CI related was changed
- '.github/workflows/**'
- 'tools/ci/**'
# This workflow should run when a file in a source directory has been modified.
- 'src/**'
- '3rdparty/**'
jobs:
build:
# Github Actions checks for '[ci skip]', '[skip ci]', '[no ci]', '[skip actions]', or '[actions skip]' but not a hyphenated version.
# It's a catch-all in case a Pull Request has been opened and someone is on auto-pilot.
if: "!contains(github.event.head_commit.message, 'ci-skip')"
runs-on: ${{ matrix.os }}
strategy:
matrix:
# The windows-latest label currently points to windows-2019.
# Available: windows-2016, windows-2019 and windows-2022
os: [windows-latest]
# We run build checks for both Renewal and PRE-Renewal
mode: ['PRE', 'RE']
steps:
- uses: actions/checkout@v2
- name: Add msbuild to PATH
uses: microsoft/setup-msbuild@v1.1
- name: Build solution in Debug
if: ${{ matrix.mode == 'PRE' }}
run: msbuild rAthena.sln -t:rebuild -property:Configuration=Debug /p:DefineConstants="BUILDBOT%3BPRERE"
- name: Build solution in Debug
if: ${{ matrix.mode == 'RE' }}
run: msbuild rAthena.sln -t:rebuild -property:Configuration=Debug /p:DefineConstants="BUILDBOT"


@@ -0,0 +1,70 @@
name: Build servers with different packet versions
# build_servers_packetversions.yml
on:
push:
branches:
- master
pull_request:
paths:
# Always trigger all Github Actions if an action or something CI related was changed
- '.github/workflows/**'
- 'tools/ci/**'
# This workflow should run when a file in a source directory has been modified.
- 'src/**'
- '3rdparty/**'
jobs:
build:
# Github Actions checks for '[ci skip]', '[skip ci]', '[no ci]', '[skip actions]', or '[actions skip]' but not a hyphenated version.
# It's a catch-all in case a Pull Request has been opened and someone is on auto-pilot.
if: "!contains(github.event.head_commit.message, 'ci-skip')"
runs-on: ${{ matrix.os }}
strategy:
matrix:
# The ubuntu-latest label currently points to ubuntu-18.04.
# Available: ubuntu-20.04, ubuntu-18.04
os: [ubuntu-latest]
# Older versions of GCC are not available via unaltered aptitude repo lists.
gcc: ['10']
# We run build checks for both Renewal and PRE-Renewal
mode: ['PRE','RE']
# Check build success for different packet-versions
packetver: ['20211103', '20200902', '20200401', '20180620', '20151104']
steps:
- uses: actions/checkout@v2
# A simple 'yes' and 'no' can be confusing, so we use names to display in the current job then convert them for use in the compiler.
- name: Variable Parsing - PRE
if: ${{ matrix.mode == 'PRE' }}
run: |
echo "PRERE=yes" >> $GITHUB_ENV
- name: Variable Parsing - RE
if: ${{ matrix.mode == 'RE' }}
run: |
echo "PRERE=no" >> $GITHUB_ENV
- name: Update & Install packages
# Ubuntu runners already have most of the packages rAthena requires to build.
# https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md
run: |
sudo apt update
sudo apt install zlib1g-dev libpcre3-dev gcc-${{ matrix.gcc }} g++-${{ matrix.gcc }}
- name: Start MySQL
run: sudo systemctl start mysql.service
- name: Setup Database and import table data
run: ./tools/ci/sql.sh
- name: Command - configure
env:
CONFIGURE_FLAGS: 'CC=gcc-${{ matrix.gcc }} CXX=g++-${{ matrix.gcc }} --enable-prere=${{ env.PRERE }} --enable-packetver=${{ matrix.packetver }} --enable-buildbot=yes'
run: ./configure $CONFIGURE_FLAGS
- name: Command - make clean
run: make clean
- name: Command - make all
run: make all

.github/workflows/build_servers_vip.yml vendored Normal file

@@ -0,0 +1,77 @@
name: Build servers in VIP mode
# build_servers_vip.yml
on:
push:
branches:
- master
pull_request:
paths:
# Always trigger all Github Actions if an action or something CI related was changed
- '.github/workflows/**'
- 'tools/ci/**'
# This workflow should run when a file in a source directory has been modified.
- 'src/**'
- '3rdparty/**'
jobs:
build:
# Github Actions checks for '[ci skip]', '[skip ci]', '[no ci]', '[skip actions]', or '[actions skip]' but not a hyphenated version.
# It's a catch-all in case a Pull Request has been opened and someone is on auto-pilot.
if: "!contains(github.event.head_commit.message, 'ci-skip')"
runs-on: ${{ matrix.os }}
strategy:
matrix:
# The ubuntu-latest label currently points to ubuntu-18.04.
# Available: ubuntu-20.04, ubuntu-18.04
os: [ubuntu-latest]
# Older versions of GCC are not available via unaltered aptitude repo lists.
gcc: ['10']
# We run build checks for both Renewal and PRE-Renewal
mode: ['PRE', 'RE']
steps:
- uses: actions/checkout@v2
# A simple 'yes' and 'no' can be confusing, so we use names to display in the current job then convert them for use in the compiler.
- name: Variable Parsing - PRE
if: ${{ matrix.mode == 'PRE' }}
run: |
echo "PRERE=yes" >> $GITHUB_ENV
- name: Variable Parsing - RE
if: ${{ matrix.mode == 'RE' }}
run: |
echo "PRERE=no" >> $GITHUB_ENV
- name: Update & Install packages
# Ubuntu runners already have most of the packages rAthena requires to build.
# https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md
run: |
sudo apt update
sudo apt install zlib1g-dev libpcre3-dev gcc-${{ matrix.gcc }} g++-${{ matrix.gcc }}
- name: Start MySQL
run: sudo systemctl start mysql.service
- name: Setup Database and import table data
run: ./tools/ci/sql.sh
- name: Command - configure
env:
CONFIGURE_FLAGS: 'CC=gcc-${{ matrix.gcc }} CXX=g++-${{ matrix.gcc }} --enable-prere=${{ env.PRERE }} --enable-buildbot=yes --enable-vip=yes'
run: ./configure $CONFIGURE_FLAGS
- name: Command - make clean
run: make clean
- name: Command - make server
run: make server
- name: Run Once - login-server
run: ./login-server --run-once
- name: Run Once - char-server
run: ./char-server --run-once
- name: Run Once - map-server
run: ./map-server --run-once

.github/workflows/npc_db_validation.yml vendored Normal file

@@ -0,0 +1,82 @@
name: Validate NPC Scripts and DB Changes
# npc_db_validation.yml
# For NPC and DB validation we only need two builds: one of Renewal and one for Pre-Renewal checks.
# NPC scripts and database files are not platform dependent, so we can achieve this validation using only a simple linux setup.
on:
push:
branches:
- master
pull_request:
paths:
# Always trigger all Github Actions if an action or something CI related was changed
- '.github/workflows/**'
- 'tools/ci/**'
# This workflow should run when a file in either the db/ or npc/ directory has been modified.
- 'db/**'
- 'npc/**'
jobs:
build:
# Github Actions checks for '[ci skip]', '[skip ci]', '[no ci]', '[skip actions]', or '[actions skip]' but not a hyphenated version.
# It's a catch-all in case a Pull Request has been opened and someone is on auto-pilot.
if: "!contains(github.event.head_commit.message, 'ci-skip')"
runs-on: ${{ matrix.os }}
strategy:
matrix:
# The ubuntu-latest label currently points to ubuntu-18.04.
# Available: ubuntu-20.04, ubuntu-18.04
os: [ubuntu-latest]
# Only a single version of GCC is required for validating NPC scripts and database changes.
gcc: ['10']
# We run build checks for both Renewal and PRE-Renewal
mode: ['PRE', 'RE']
steps:
- uses: actions/checkout@v2
# A simple 'yes' and 'no' can be confusing, so we use names to display in the current job then convert them for use in the compiler.
- name: Variable Parsing - PRE
if: ${{ matrix.mode == 'PRE' }}
run: |
echo "PRERE=yes" >> $GITHUB_ENV
- name: Variable Parsing - RE
if: ${{ matrix.mode == 'RE' }}
run: |
echo "PRERE=no" >> $GITHUB_ENV
- name: Update & Install packages
# Ubuntu runners already have most of the packages rAthena requires to build.
# https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-Readme.md
run: |
sudo apt update
sudo apt install zlib1g-dev libpcre3-dev gcc-${{ matrix.gcc }} g++-${{ matrix.gcc }}
- name: Start MySQL
run: sudo systemctl start mysql.service
- name: Setup Database and import table data
run: ./tools/ci/sql.sh
- name: Command - configure
env:
CONFIGURE_FLAGS: 'CC=gcc-${{ matrix.gcc }} CXX=g++-${{ matrix.gcc }} --enable-prere=${{ env.PRERE }} --enable-buildbot=yes'
run: ./configure $CONFIGURE_FLAGS
# npc.sh enables all NPC scripts in the custom and test folders.
- name: Enable All NPCs for Testing
run: ./tools/ci/npc.sh
- name: Command - make clean
run: make clean
# Create import directories
- name: Command - make import
run: make import
- name: Command - make map
run: make map
- name: Run Once - map-server
run: ./map-server --run-once

.gitignore vendored

@@ -50,6 +50,9 @@ Thumbs.db
/3rdparty/libconfig/*.o
/3rdparty/libconfig/obj
# /3rdparty/rapidyaml/
/3rdparty/rapidyaml/Makefile
# /3rdparty/yaml-cpp/
/3rdparty/yaml-cpp/Makefile


@@ -1,198 +0,0 @@
language: cpp
dist: trusty
sudo: false
matrix:
include:
# First check if all our options are good
# Renewal without VIP
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-5
env:
- MATRIX_EVAL="CC=gcc-5 && CXX=g++-5"
- CONFIGURE_FLAGS="--enable-prere=no --enable-vip=no --enable-buildbot=yes"
# Renewal with VIP
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-5
env:
- MATRIX_EVAL="CC=gcc-5 && CXX=g++-5"
- CONFIGURE_FLAGS="--enable-prere=yes --enable-vip=no --enable-buildbot=yes"
# Pre-Renewal without VIP
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-5
env:
- MATRIX_EVAL="CC=gcc-5 && CXX=g++-5"
- CONFIGURE_FLAGS="--enable-prere=no --enable-vip=yes --enable-buildbot=yes"
# Pre-Renewal with VIP
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-5
env:
- MATRIX_EVAL="CC=gcc-5 && CXX=g++-5"
- CONFIGURE_FLAGS="--enable-prere=yes --enable-vip=yes --enable-buildbot=yes"
# After that check all different compilers and compiler versions
# GCC
# Version 6
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-6
env:
- MATRIX_EVAL="CC=gcc-6 && CXX=g++-6"
- CONFIGURE_FLAGS="--enable-prere=no --enable-vip=no --enable-buildbot=yes"
# Version 7
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-7
env:
- MATRIX_EVAL="CC=gcc-7 && CXX=g++-7"
- CONFIGURE_FLAGS="--enable-prere=no --enable-vip=no --enable-buildbot=yes"
# Version 8
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-8
env:
- MATRIX_EVAL="CC=gcc-8 && CXX=g++-8"
- CONFIGURE_FLAGS="--enable-prere=no --enable-vip=no --enable-buildbot=yes"
# Clang
# Version 3.9
- os: linux
addons:
apt:
sources:
- llvm-toolchain-trusty-3.9
packages:
- clang-3.9
env:
- MATRIX_EVAL="CC=clang-3.9 && CXX=clang++-3.9"
- CONFIGURE_FLAGS="--enable-prere=no --enable-vip=no --enable-buildbot=yes"
# Version 4
- os: linux
addons:
apt:
sources:
- llvm-toolchain-trusty-4.0
packages:
- clang-4.0
env:
- MATRIX_EVAL="CC=clang-4.0 && CXX=clang++-4.0"
- CONFIGURE_FLAGS="--enable-prere=no --enable-vip=no --enable-buildbot=yes"
# Version 5
- os: linux
addons:
apt:
sources:
- llvm-toolchain-trusty-5.0
packages:
- clang-5.0
env:
- MATRIX_EVAL="CC=clang-5.0 && CXX=clang++-5.0"
- CONFIGURE_FLAGS="--enable-prere=no --enable-vip=no --enable-buildbot=yes"
# LLVM on OSX
# - os: osx
# osx_image: xcode9.2
# install:
# - brew update
# - brew install mysql
# - brew tap homebrew/services
# - brew services start mysql
## MySQL takes a while to start...
# - brew services list
# - launchctl list | grep mysql
# before_install: false
# env:
# - CONFIGURE_FLAGS="--enable-prere=no --enable-vip=no --enable-buildbot=yes --enable-lto=no"
# script:
# - ./configure $CONFIGURE_FLAGS
## MacOS default MySQL configuration does not like our card seller(only full group by)
# - ./tools/ci/npc.sh
# - make clean
# - make all
# - ./login-server --run-once
# - ./char-server --run-once
# - ./map-server --run-once
# CMake
- os: linux
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-5
env:
- MATRIX_EVAL="CC=gcc-5 && CXX=g++-5"
script:
- mkdir cbuild
- cd cbuild
- cmake -G "Unix Makefiles" ..
- make || travis_terminate 1
# MariaDB
- os: linux
addons:
mariadb: '10.0'
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-5
- libmariadbclient-dev
env:
- MATRIX_EVAL="CC=gcc-5 && CXX=g++-5"
- CONFIGURE_FLAGS="--enable-prere=no --enable-vip=no --enable-buildbot=yes"
before_install:
- eval "${MATRIX_EVAL}"
before_script:
- uname -a
- ./tools/ci/sql.sh
script:
- ./configure $CONFIGURE_FLAGS || travis_terminate 1
- ./tools/ci/npc.sh
- make clean || travis_terminate 1
- make all || travis_terminate 1
- ./login-server --run-once
- ./char-server --run-once
- ./map-server --run-once
env:
global:
- DB_ROOT="root"
- DB_HOST="127.0.0.1"
- DB_NAME="ragnarok"
- DB_USER="ragnarok"
- DB_PASS="ragnarok"
notifications:
email: false


@@ -50,5 +50,6 @@ endmacro( CONFIGURE_WITH_LOCAL_OR_SYSTEM )
add_subdirectory( libconfig )
add_subdirectory( mysql )
add_subdirectory( pcre )
add_subdirectory( zlib )
add_subdirectory( rapidyaml )
add_subdirectory( yaml-cpp )
add_subdirectory( zlib )

3rdparty/rapidyaml/.gitchangelog.rc vendored Normal file

@@ -0,0 +1,291 @@
# -*- coding: utf-8; mode: python -*-
##
## https://pypi.org/project/gitchangelog/
##
## Format
##
## ACTION: [AUDIENCE:] COMMIT_MSG [!TAG ...]
##
## Description
##
## ACTION is one of 'chg', 'fix', 'new'
##
## Is WHAT the change is about.
##
## 'chg' is for refactor, small improvement, cosmetic changes...
## 'fix' is for bug fixes
## 'new' is for new features, big improvement
##
## AUDIENCE is optional and one of 'dev', 'usr', 'pkg', 'test', 'doc'
##
## Is WHO is concerned by the change.
##
## 'dev' is for developers (API changes, refactors...)
## 'usr' is for final users (UI changes)
## 'pkg' is for packagers (packaging changes)
## 'test' is for testers (test only related changes)
## 'doc' is for doc guys (doc only changes)
##
## COMMIT_MSG is ... well ... the commit message itself.
##
## TAGs are additional adjectives such as 'refactor', 'minor', 'cosmetic'
##
## They are preceded with a '!' or a '@' (prefer the former, as the
## latter is wrongly interpreted in github.) Commonly used tags are:
##
## 'refactor' is obviously for refactoring code only
## 'minor' is for a very meaningless change (a typo, adding a comment)
## 'cosmetic' is for cosmetic driven change (re-indentation, 80-col...)
## 'wip' is for partial functionality but complete subfunctionality.
##
## Example:
##
## new: usr: support of bazaar implemented
## chg: re-indentend some lines !cosmetic
## new: dev: updated code to be compatible with last version of killer lib.
## fix: pkg: updated year of licence coverage.
## new: test: added a bunch of test around user usability of feature X.
## fix: typo in spelling my name in comment. !minor
##
## Please note that multi-line commit message are supported, and only the
## first line will be considered as the "summary" of the commit message. So
## tags, and other rules only apply to the summary. The body of the commit
## message will be displayed in the changelog without reformatting.
##
## ``ignore_regexps`` is a list of regexps
##
## Any commit having its full commit message matching any regexp listed here
## will be ignored and won't be reported in the changelog.
##
ignore_regexps = [
r'@minor', r'!minor',
r'@cosmetic', r'!cosmetic',
r'@refactor', r'!refactor',
r'@wip', r'!wip',
r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[p|P]kg:',
r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*[d|D]ev:',
r'^(.{3,3}\s*:)?\s*[fF]irst commit.?\s*$',
r'^$', ## ignore commits with empty messages
]
## ``section_regexps`` is a list of 2-tuples associating a string label and a
## list of regexp
##
## Commit messages will be classified in sections thanks to this. Section
## titles are the label, and a commit is classified under this section if any
## of the regexps associated is matching.
##
## Please note that ``section_regexps`` will only classify commits and won't
## make any changes to the contents. So you'll probably want to go check
## ``subject_process`` (or ``body_process``) to do some changes to the subject,
## whenever you are tweaking this variable.
##
section_regexps = [
('New', [
r'^[nN]ew\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
]),
('Changes', [
r'^[cC]hg\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
]),
('Fix', [
r'^[fF]ix\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n]*)$',
]),
('Other', None ## Match all lines
),
]
## ``body_process`` is a callable
##
## This callable will be given the original body and result will
## be used in the changelog.
##
## Available constructs are:
##
## - any python callable that take one txt argument and return txt argument.
##
## - ReSub(pattern, replacement): will apply regexp substitution.
##
## - Indent(chars=" "): will indent the text with the prefix
## Please remember that template engines gets also to modify the text and
## will usually indent themselves the text if needed.
##
## - Wrap(regexp=r"\n\n"): re-wrap text in separate paragraph to fill 80-Columns
##
## - noop: do nothing
##
## - ucfirst: ensure the first letter is uppercase.
## (usually used in the ``subject_process`` pipeline)
##
## - final_dot: ensure text finishes with a dot
## (usually used in the ``subject_process`` pipeline)
##
## - strip: remove any spaces before or after the content of the string
##
## - SetIfEmpty(msg="No commit message."): will set the text to
## whatever given ``msg`` if the current text is empty.
##
## Additionally, you can `pipe` the provided filters, for instance:
#body_process = Wrap(regexp=r'\n(?=\w+\s*:)') | Indent(chars=" ")
#body_process = Wrap(regexp=r'\n(?=\w+\s*:)')
#body_process = noop
body_process = ReSub(r'((^|\n)[A-Z]\w+(-\w+)*: .*(\n\s+.*)*)+$', r'') | strip
## ``subject_process`` is a callable
##
## This callable will be given the original subject and result will
## be used in the changelog.
##
## Available constructs are those listed in ``body_process`` doc.
subject_process = (strip |
ReSub(r'^([cC]hg|[fF]ix|[nN]ew)\s*:\s*((dev|use?r|pkg|test|doc)\s*:\s*)?([^\n@]*)(@[a-z]+\s+)*$', r'\4') |
SetIfEmpty("No commit message.") | ucfirst | final_dot)
## ``tag_filter_regexp`` is a regexp
##
## Tags that will be used for the changelog must match this regexp.
##
tag_filter_regexp = r'^[0-9]+\.[0-9]+(\.[0-9]+)?$'
## ``unreleased_version_label`` is a string or a callable that outputs a string
##
## This label will be used as the changelog Title of the last set of changes
## between last valid tag and HEAD if any.
unreleased_version_label = "(unreleased)"
## ``output_engine`` is a callable
##
## This will change the output format of the generated changelog file
##
## Available choices are:
##
## - rest_py
##
## Legacy pure python engine, outputs ReSTructured text.
## This is the default.
##
## - mustache(<template_name>)
##
## Template name could be any of the available templates in
## ``templates/mustache/*.tpl``.
## Requires python package ``pystache``.
## Examples:
## - mustache("markdown")
## - mustache("restructuredtext")
##
## - makotemplate(<template_name>)
##
## Template name could be any of the available templates in
## ``templates/mako/*.tpl``.
## Requires python package ``mako``.
## Examples:
## - makotemplate("restructuredtext")
##
#output_engine = rest_py
#output_engine = mustache("restructuredtext")
output_engine = mustache("markdown")
#output_engine = makotemplate("restructuredtext")
## ``include_merge`` is a boolean
##
## This option tells git-log whether to include merge commits in the log.
## The default is to include them.
include_merge = True
## ``log_encoding`` is a string identifier
##
## This option tells gitchangelog what encoding is output by ``git log``.
## The default is to be clever about it: it checks ``git config`` for
## ``i18n.logOutputEncoding``, and if not found will default to git's own
## default: ``utf-8``.
#log_encoding = 'utf-8'
## ``publish`` is a callable
##
## Sets what ``gitchangelog`` should do with the output generated by
## the output engine. ``publish`` is a callable taking one argument
## that is an iterator over lines from the output engine.
##
## Some helper callable are provided:
##
## Available choices are:
##
## - stdout
##
## Outputs directly to standard output
## (This is the default)
##
## - FileInsertAtFirstRegexMatch(file, pattern, idx=lambda m: m.start())
##
## Creates a callable that will parse given file for the given
## regex pattern and will insert the output in the file.
## ``idx`` is a callable that receives the matching object and
## must return an integer index pointing to where to insert
## the output in the file. Default is to return the position of
## the start of the matched string.
##
## - FileRegexSubst(file, pattern, replace, flags)
##
## Apply a replace inplace in the given file. Your regex pattern must
## take care of everything and might be more complex. Check the README
## for a complete copy-pastable example.
##
# publish = FileInsertIntoFirstRegexMatch(
# "CHANGELOG.rst",
# r'/(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n/',
# idx=lambda m: m.start(1)
# )
#publish = stdout
## ``revs`` is a list of callable or a list of string
##
## callable will be called to resolve as strings and allow dynamical
## computation of these. The result will be used as revisions for
## gitchangelog (as if directly stated on the command line). This allows
## to filter exactly which commits will be read by gitchangelog.
##
## To get a full documentation on the format of these strings, please
## refer to the ``git rev-list`` arguments. There are many examples.
##
## Using callables is especially useful, for instance, if you
## are using gitchangelog to generate incrementally your changelog.
##
## Some helpers are provided, you can use them::
##
## - FileFirstRegexMatch(file, pattern): will return a callable that will
## return the first string match for the given pattern in the given file.
## If you use named sub-patterns in your regex pattern, it'll output only
## the string matching the regex pattern named "rev".
##
## - Caret(rev): will return the rev prefixed by a "^", which is a
## way to remove the given revision and all its ancestor.
##
## Please note that if you provide a rev-list on the command line, it'll
## replace this value (which will then be ignored).
##
## If empty, then ``gitchangelog`` will act as it had to generate a full
## changelog.
##
## The default is to use all commits to make the changelog.
#revs = ["^1.0.3", ]
#revs = [
# Caret(
# FileFirstRegexMatch(
# "CHANGELOG.rst",
# r"(?P<rev>[0-9]+\.[0-9]+(\.[0-9]+)?)\s+\([0-9]+-[0-9]{2}-[0-9]{2}\)\n--+\n")),
# "HEAD"
#]
revs = []

3rdparty/rapidyaml/.gitignore vendored Normal file

@@ -0,0 +1,50 @@
# text editor files
*.bck
\#*
*~
.cquery_cached_index/
.clangd/
.ccls-cache/
.cache/
__pycache__/
# gdb files
.gdbinit
setup.gdb
# valgrind files
callgrind*
vgcore*
# Visual Studio files
.vs/
.vscode/
# QtCreator files
CMakeLists.txt.user*
# Eclipse
.project
.cproject
/.settings/
# KDevelop files
*.kdev4
# build files
build/
install/
.python-version
compile_commands.json
# test files
/Testing/
# python packaging
.eggs/
dist/
rapidyaml.egg-info/
# continuous integration files
.ci/.vagrant
# amalgamation files
src/c4/c4core_all.hpp
src_singleheader/

3rdparty/rapidyaml/.gitmodules vendored Normal file

@@ -0,0 +1,3 @@
[submodule "extern/c4core"]
path = ext/c4core
url = https://github.com/biojppm/c4core

3rdparty/rapidyaml/.lgtm.yml vendored Normal file

@@ -0,0 +1,2 @@
queries:
- exclude: cpp/unsigned-comparison-zero

3rdparty/rapidyaml/CMakeLists.txt vendored Normal file

@@ -0,0 +1,100 @@
cmake_minimum_required(VERSION 3.12)
include(./ext/c4core/cmake/c4Project.cmake)
project(ryml
DESCRIPTION "Rapid YAML parsing and emitting"
HOMEPAGE_URL "https://github.com/biojppm/rapidyaml"
LANGUAGES CXX)
include(./compat.cmake)
c4_project(VERSION 0.4.0 STANDALONE
AUTHOR "Joao Paulo Magalhaes <dev@jpmag.me>")
#-------------------------------------------------------
option(RYML_WITH_TAB_TOKENS "Enable parsing of tabs after ':' and '-'. This is costly and disabled by default." OFF)
option(RYML_DEFAULT_CALLBACKS "Enable ryml's default implementation of callbacks: allocate(), free(), error()" ON)
option(RYML_BUILD_TOOLS "build tools" OFF)
option(RYML_BUILD_API "Enable API generation (python, etc)" OFF)
option(RYML_DBG "Enable (very verbose) ryml debug prints." OFF)
#-------------------------------------------------------
c4_require_subproject(c4core INCORPORATE
SUBDIRECTORY ${RYML_EXT_DIR}/c4core)
c4_add_library(ryml
SOURCES
ryml.hpp
ryml_std.hpp
c4/yml/detail/checks.hpp
c4/yml/detail/parser_dbg.hpp
c4/yml/detail/print.hpp
c4/yml/detail/stack.hpp
c4/yml/common.hpp
c4/yml/common.cpp
c4/yml/emit.def.hpp
c4/yml/emit.hpp
c4/yml/export.hpp
c4/yml/node.hpp
c4/yml/node.cpp
c4/yml/parse.hpp
c4/yml/parse.cpp
c4/yml/preprocess.hpp
c4/yml/preprocess.cpp
c4/yml/std/map.hpp
c4/yml/std/std.hpp
c4/yml/std/string.hpp
c4/yml/std/vector.hpp
c4/yml/tree.hpp
c4/yml/tree.cpp
c4/yml/writer.hpp
c4/yml/yml.hpp
ryml.natvis
SOURCE_ROOT ${RYML_SRC_DIR}
INC_DIRS
$<BUILD_INTERFACE:${RYML_SRC_DIR}>
$<INSTALL_INTERFACE:include>
LIBS c4core
INCORPORATE c4core
)
if(RYML_WITH_TAB_TOKENS)
target_compile_definitions(ryml PUBLIC RYML_WITH_TAB_TOKENS)
endif()
if(NOT RYML_DEFAULT_CALLBACKS)
target_compile_definitions(ryml PRIVATE RYML_NO_DEFAULT_CALLBACKS)
endif()
if(RYML_DBG)
target_compile_definitions(ryml PRIVATE RYML_DBG)
endif()
#-------------------------------------------------------
c4_install_target(ryml)
c4_install_exports(DEPENDENCIES c4core)
c4_pack_project()
#-------------------------------------------------------
# developer targets
# extern libraries, used only for testing/benchmarking
if(RYML_BUILD_TESTS OR RYML_BUILD_BENCHMARKS OR RYML_BUILD_TOOLS)
include(ext/testbm.cmake)
endif()
if(RYML_BUILD_TOOLS)
add_subdirectory(tools)
endif()
c4_add_dev_targets()
add_custom_target(ryml-uninstall
"${CMAKE_COMMAND}" -P "${PROJECT_SOURCE_DIR}/cmake/uninstall.cmake"
)

3rdparty/rapidyaml/CONTRIBUTING.md vendored Normal file

@@ -0,0 +1,18 @@
# Contributing
Thanks for your contribution!
* Make sure to clone the project with `git clone --recursive` so that
the submodules are initialized correctly.
* To enable both tests and benchmarks, configure ryml with `-DRYML_DEV=ON`
when calling cmake. To enable only tests, use `-DRYML_BUILD_TESTS=ON`; to
enable only benchmarks use `-DRYML_BUILD_BENCHMARKS=ON`. All these flags
are disabled by default.
* Code style for pull requests should respect the existing code style:
```c++
if(foo) // no space before parens
{ // curly brackets on next line
// no tabs; indent with 4 spaces
bar();
}
```

3rdparty/rapidyaml/LICENSE.txt vendored Normal file

@@ -0,0 +1,20 @@
Copyright (c) 2018, Joao Paulo Magalhaes <dev@jpmag.me>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

3rdparty/rapidyaml/MANIFEST.in vendored Normal file

@@ -0,0 +1,3 @@
# MANIFEST.in must be in root directory.
# See https://github.com/pypa/setuptools/issues/2615
graft ext

3rdparty/rapidyaml/Makefile.in vendored Normal file

@@ -0,0 +1,44 @@
RAPIDYAML_OBJ = $(shell find * -type f -name "*.cpp" | sed -e "s/\.cpp/\.o/g")
RAPIDYAML_DIR_OBJ = $(RAPIDYAML_OBJ:%=obj/%)
RAPIDYAML_H = $(shell find * -type f -name "*.h*")
RAPIDYAML_AR = obj/ryml.a
CXXFLAG =-std=c++11
INC=-Isrc -Iext/c4core/src
@SET_MAKE@
#####################################################################
.PHONY : all clean help rapidyaml
all: rapidyaml
clean:
@echo " CLEAN rapidyaml"
@rm -rf obj *.o
help:
@echo "possible targets are 'all' 'clean' 'help'"
@echo "'rapidyaml' - build $(RAPIDYAML_AR)
@echo "'all' - builds $(RAPIDYAML_DIR_OBJ)"
@echo "'clean' - deletes $(RAPIDYAML_DIR_OBJ)"
@echo "'help' - outputs this message"
#####################################################################
obj:
@echo " MKDIR obj/src/c4/yml"
@echo " MKDIR obj/ext/c4core/src/c4"
@mkdir -p obj/src/c4/yml
@mkdir -p obj/ext/c4core/src/c4
obj/%.o: %.cpp $(RAPIDYAML_H)
@echo " CXX $<"
@@CXX@ $(CXXFLAG) @CFLAGS_AR@ @CPPFLAGS@ -g $(INC) -c $(OUTPUT_OPTION) $<
rapidyaml: obj $(RAPIDYAML_DIR_OBJ) $(RAPIDYAML_AR)
$(RAPIDYAML_AR): $(RAPIDYAML_DIR_OBJ)
@echo " AR $@"
@@AR@ rcs obj/ryml.a $(RAPIDYAML_DIR_OBJ)

3rdparty/rapidyaml/README.md vendored Normal file
File diff suppressed because it is too large

3rdparty/rapidyaml/ROADMAP.md vendored Normal file

@@ -0,0 +1,18 @@
# Roadmap
Roughly in order of priority:
* Cleanup:
* Review & cleanup API surface.
* Turn calls to `C4_ASSERT()` into calls to `RYML_ASSERT()`
* Add emit formatting controls:
* add single-line flow formatter
* add multi-line flow formatters
* indenting
* non indenting
* keep current block formatter
* add customizable linebreak limits (number of columns) to every formatter
* add per node format flags
* (lesser priority) add auto formatter using reasonable heuristics to
switch between other existing formatters
* Investigate possibility of comment-preserving roundtrips


@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<Project>
<ProjectOutputs />
<ContentFiles />
<SatelliteDlls />
<NonRecipeFileRefs />
</Project>

3rdparty/rapidyaml/c4core.vcxproj vendored Normal file

@@ -0,0 +1,294 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="16.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{B795DCB1-2FEA-3BDC-A05C-33F5BF08CF31}</ProjectGuid>
<WindowsTargetPlatformVersion>10.0</WindowsTargetPlatformVersion>
<Keyword>Win32Proj</Keyword>
<ProjectName>c4core</ProjectName>
<VCProjectUpgraderObjectName>NoUpgrade</VCProjectUpgraderObjectName>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
<PlatformToolset>$(DefaultPlatformToolset)</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
<PlatformToolset>$(DefaultPlatformToolset)</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
<PlatformToolset>$(DefaultPlatformToolset)</PlatformToolset>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
<PlatformToolset>$(DefaultPlatformToolset)</PlatformToolset>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup>
<_ProjectFileVersion>10.0.20506.1</_ProjectFileVersion>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(SolutionDir).vs\build\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(SolutionDir).vs\build\$(ProjectName)\$(Platform)\$(Configuration)\</IntDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(SolutionDir).vs\build\$(ProjectName)\$(Platform)\$(Configuration)\</IntDir>
<TargetName Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(ProjectName)</TargetName>
<TargetName Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(ProjectName)</TargetName>
<TargetExt Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">.lib</TargetExt>
<TargetExt Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">.lib</TargetExt>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(SolutionDir).vs\build\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(SolutionDir).vs\build\$(ProjectName)\$(Platform)\$(Configuration)\</IntDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(SolutionDir).vs\build\$(ProjectName)\$(Platform)\$(Configuration)\</IntDir>
<TargetName Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(ProjectName)</TargetName>
<TargetName Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(ProjectName)</TargetName>
<TargetExt Condition="'$(Configuration)|$(Platform)'=='Release|x64'">.lib</TargetExt>
<TargetExt Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">.lib</TargetExt>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<OutDir>$(SolutionDir).vs\build\</OutDir>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<OutDir>$(SolutionDir).vs\build\</OutDir>
</PropertyGroup>
<PropertyGroup Label="Vcpkg">
<VcpkgEnabled>false</VcpkgEnabled>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<AssemblerListingLocation>$(IntDir)</AssemblerListingLocation>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<CompileAs>CompileAsCpp</CompileAs>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
<ExceptionHandling>Sync</ExceptionHandling>
<InlineFunctionExpansion>Disabled</InlineFunctionExpansion>
<Optimization>Disabled</Optimization>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
<RuntimeTypeInfo>true</RuntimeTypeInfo>
<UseFullPaths>false</UseFullPaths>
<WarningLevel>Level3</WarningLevel>
<PreprocessorDefinitions>WIN32;_WINDOWS;CMAKE_INTDIR="Debug";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ObjectFileName>$(IntDir)</ObjectFileName>
<LanguageStandard>stdcpp17</LanguageStandard>
</ClCompile>
<ResourceCompile>
<PreprocessorDefinitions>WIN32;_DEBUG;_WINDOWS;CMAKE_INTDIR=\"Debug\";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ResourceCompile>
<Midl>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<OutputDirectory>$(ProjectDir)/$(IntDir)</OutputDirectory>
<HeaderFileName>%(Filename).h</HeaderFileName>
<TypeLibraryName>%(Filename).tlb</TypeLibraryName>
<InterfaceIdentifierFileName>%(Filename)_i.c</InterfaceIdentifierFileName>
<ProxyFileName>%(Filename)_p.c</ProxyFileName>
</Midl>
<Lib>
<AdditionalOptions>%(AdditionalOptions) /machine:x64</AdditionalOptions>
</Lib>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<AssemblerListingLocation>$(IntDir)</AssemblerListingLocation>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<CompileAs>CompileAsCpp</CompileAs>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
<ExceptionHandling>Sync</ExceptionHandling>
<InlineFunctionExpansion>Disabled</InlineFunctionExpansion>
<Optimization>Disabled</Optimization>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
<RuntimeTypeInfo>true</RuntimeTypeInfo>
<UseFullPaths>false</UseFullPaths>
<WarningLevel>Level3</WarningLevel>
<PreprocessorDefinitions>WIN32;_WINDOWS;CMAKE_INTDIR="Debug";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ObjectFileName>$(IntDir)</ObjectFileName>
<LanguageStandard>stdcpp17</LanguageStandard>
</ClCompile>
<ResourceCompile>
<PreprocessorDefinitions>WIN32;_DEBUG;_WINDOWS;CMAKE_INTDIR=\"Debug\";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ResourceCompile>
<Midl>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<OutputDirectory>$(ProjectDir)/$(IntDir)</OutputDirectory>
<HeaderFileName>%(Filename).h</HeaderFileName>
<TypeLibraryName>%(Filename).tlb</TypeLibraryName>
<InterfaceIdentifierFileName>%(Filename)_i.c</InterfaceIdentifierFileName>
<ProxyFileName>%(Filename)_p.c</ProxyFileName>
</Midl>
<Lib>
<AdditionalOptions>%(AdditionalOptions) /machine:x86</AdditionalOptions>
</Lib>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<AssemblerListingLocation>$(IntDir)</AssemblerListingLocation>
<CompileAs>CompileAsCpp</CompileAs>
<ExceptionHandling>Sync</ExceptionHandling>
<InlineFunctionExpansion>AnySuitable</InlineFunctionExpansion>
<Optimization>MaxSpeed</Optimization>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
<RuntimeTypeInfo>true</RuntimeTypeInfo>
<UseFullPaths>false</UseFullPaths>
<WarningLevel>Level3</WarningLevel>
<PreprocessorDefinitions>WIN32;_WINDOWS;NDEBUG;CMAKE_INTDIR="Release";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ObjectFileName>$(IntDir)</ObjectFileName>
<DebugInformationFormat>
</DebugInformationFormat>
<LanguageStandard>stdcpp17</LanguageStandard>
</ClCompile>
<ResourceCompile>
<PreprocessorDefinitions>WIN32;_WINDOWS;NDEBUG;CMAKE_INTDIR=\"Release\";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ResourceCompile>
<Midl>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<OutputDirectory>$(ProjectDir)/$(IntDir)</OutputDirectory>
<HeaderFileName>%(Filename).h</HeaderFileName>
<TypeLibraryName>%(Filename).tlb</TypeLibraryName>
<InterfaceIdentifierFileName>%(Filename)_i.c</InterfaceIdentifierFileName>
<ProxyFileName>%(Filename)_p.c</ProxyFileName>
</Midl>
<Lib>
<AdditionalOptions>%(AdditionalOptions) /machine:x64</AdditionalOptions>
</Lib>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<AssemblerListingLocation>$(IntDir)</AssemblerListingLocation>
<CompileAs>CompileAsCpp</CompileAs>
<ExceptionHandling>Sync</ExceptionHandling>
<InlineFunctionExpansion>AnySuitable</InlineFunctionExpansion>
<Optimization>MaxSpeed</Optimization>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
<RuntimeTypeInfo>true</RuntimeTypeInfo>
<UseFullPaths>false</UseFullPaths>
<WarningLevel>Level3</WarningLevel>
<PreprocessorDefinitions>WIN32;_WINDOWS;NDEBUG;CMAKE_INTDIR="Release";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<ObjectFileName>$(IntDir)</ObjectFileName>
<DebugInformationFormat>
</DebugInformationFormat>
<LanguageStandard>stdcpp17</LanguageStandard>
</ClCompile>
<ResourceCompile>
<PreprocessorDefinitions>WIN32;_WINDOWS;NDEBUG;CMAKE_INTDIR=\"Release\";%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ResourceCompile>
<Midl>
<AdditionalIncludeDirectories>ext\c4core\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<OutputDirectory>$(ProjectDir)/$(IntDir)</OutputDirectory>
<HeaderFileName>%(Filename).h</HeaderFileName>
<TypeLibraryName>%(Filename).tlb</TypeLibraryName>
<InterfaceIdentifierFileName>%(Filename)_i.c</InterfaceIdentifierFileName>
<ProxyFileName>%(Filename)_p.c</ProxyFileName>
</Midl>
<Lib>
<AdditionalOptions>%(AdditionalOptions) /machine:x86</AdditionalOptions>
</Lib>
</ItemDefinitionGroup>
<ItemGroup>
<ClInclude Include="ext\c4core\src\c4\allocator.hpp" />
<ClInclude Include="ext\c4core\src\c4\base64.hpp" />
<ClCompile Include="ext\c4core\src\c4\base64.cpp" />
<ClInclude Include="ext\c4core\src\c4\blob.hpp" />
<ClInclude Include="ext\c4core\src\c4\bitmask.hpp" />
<ClInclude Include="ext\c4core\src\c4\charconv.hpp" />
<ClInclude Include="ext\c4core\src\c4\c4_pop.hpp" />
<ClInclude Include="ext\c4core\src\c4\c4_push.hpp" />
<ClCompile Include="ext\c4core\src\c4\char_traits.cpp" />
<ClInclude Include="ext\c4core\src\c4\char_traits.hpp" />
<ClInclude Include="ext\c4core\src\c4\common.hpp" />
<ClInclude Include="ext\c4core\src\c4\compiler.hpp" />
<ClInclude Include="ext\c4core\src\c4\config.hpp" />
<ClInclude Include="ext\c4core\src\c4\cpu.hpp" />
<ClInclude Include="ext\c4core\src\c4\ctor_dtor.hpp" />
<ClInclude Include="ext\c4core\src\c4\dump.hpp" />
<ClInclude Include="ext\c4core\src\c4\enum.hpp" />
<ClCompile Include="ext\c4core\src\c4\error.cpp" />
<ClInclude Include="ext\c4core\src\c4\error.hpp" />
<ClInclude Include="ext\c4core\src\c4\export.hpp" />
<ClInclude Include="ext\c4core\src\c4\format.hpp" />
<ClCompile Include="ext\c4core\src\c4\format.cpp" />
<ClInclude Include="ext\c4core\src\c4\hash.hpp" />
<ClInclude Include="ext\c4core\src\c4\language.hpp" />
<ClCompile Include="ext\c4core\src\c4\language.cpp" />
<ClCompile Include="ext\c4core\src\c4\memory_resource.cpp" />
<ClInclude Include="ext\c4core\src\c4\memory_resource.hpp" />
<ClCompile Include="ext\c4core\src\c4\memory_util.cpp" />
<ClInclude Include="ext\c4core\src\c4\memory_util.hpp" />
<ClInclude Include="ext\c4core\src\c4\platform.hpp" />
<ClInclude Include="ext\c4core\src\c4\preprocessor.hpp" />
<ClInclude Include="ext\c4core\src\c4\restrict.hpp" />
<ClInclude Include="ext\c4core\src\c4\span.hpp" />
<ClInclude Include="ext\c4core\src\c4\std\std.hpp" />
<ClInclude Include="ext\c4core\src\c4\std\std_fwd.hpp" />
<ClInclude Include="ext\c4core\src\c4\std\string.hpp" />
<ClInclude Include="ext\c4core\src\c4\std\string_fwd.hpp" />
<ClInclude Include="ext\c4core\src\c4\std\tuple.hpp" />
<ClInclude Include="ext\c4core\src\c4\std\vector.hpp" />
<ClInclude Include="ext\c4core\src\c4\std\vector_fwd.hpp" />
<ClInclude Include="ext\c4core\src\c4\substr.hpp" />
<ClInclude Include="ext\c4core\src\c4\substr_fwd.hpp" />
<ClInclude Include="ext\c4core\src\c4\szconv.hpp" />
<ClInclude Include="ext\c4core\src\c4\time.hpp" />
<ClCompile Include="ext\c4core\src\c4\time.cpp" />
<ClInclude Include="ext\c4core\src\c4\type_name.hpp" />
<ClInclude Include="ext\c4core\src\c4\types.hpp" />
<ClInclude Include="ext\c4core\src\c4\unrestrict.hpp" />
<ClCompile Include="ext\c4core\src\c4\utf.cpp" />
<ClInclude Include="ext\c4core\src\c4\utf.hpp" />
<ClInclude Include="ext\c4core\src\c4\windows.hpp" />
<ClInclude Include="ext\c4core\src\c4\windows_pop.hpp" />
<ClInclude Include="ext\c4core\src\c4\windows_push.hpp" />
<Natvis Include="ext\c4core\src\c4\c4core.natvis" />
<ClInclude Include="ext\c4core\src\c4\ext\debugbreak\debugbreak.h" />
<ClInclude Include="ext\c4core\src\c4\ext\rng\rng.hpp" />
<ClInclude Include="ext\c4core\src\c4\ext\sg14\inplace_function.h" />
<ClInclude Include="ext\c4core\src\c4\ext\fast_float.hpp" />
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\ascii_number.h" />
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\bigint.h" />
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\decimal_to_binary.h" />
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\digit_comparison.h" />
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\fast_float.h" />
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\fast_table.h" />
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\float_common.h" />
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\parse_number.h" />
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\simple_decimal_conversion.h" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>


@@ -0,0 +1,239 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="16.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<ClCompile Include="ext\c4core\src\c4\base64.cpp">
<Filter>c4</Filter>
</ClCompile>
<ClCompile Include="ext\c4core\src\c4\char_traits.cpp">
<Filter>c4</Filter>
</ClCompile>
<ClCompile Include="ext\c4core\src\c4\error.cpp">
<Filter>c4</Filter>
</ClCompile>
<ClCompile Include="ext\c4core\src\c4\format.cpp">
<Filter>c4</Filter>
</ClCompile>
<ClCompile Include="ext\c4core\src\c4\language.cpp">
<Filter>c4</Filter>
</ClCompile>
<ClCompile Include="ext\c4core\src\c4\memory_resource.cpp">
<Filter>c4</Filter>
</ClCompile>
<ClCompile Include="ext\c4core\src\c4\memory_util.cpp">
<Filter>c4</Filter>
</ClCompile>
<ClCompile Include="ext\c4core\src\c4\time.cpp">
<Filter>c4</Filter>
</ClCompile>
<ClCompile Include="ext\c4core\src\c4\utf.cpp">
<Filter>c4</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<ClInclude Include="ext\c4core\src\c4\allocator.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\base64.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\blob.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\bitmask.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\charconv.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\c4_pop.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\c4_push.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\char_traits.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\common.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\compiler.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\config.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\cpu.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ctor_dtor.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\dump.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\enum.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\error.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\export.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\format.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\hash.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\language.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\memory_resource.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\memory_util.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\platform.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\preprocessor.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\restrict.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\span.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\std\std.hpp">
<Filter>c4\std</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\std\std_fwd.hpp">
<Filter>c4\std</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\std\string.hpp">
<Filter>c4\std</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\std\string_fwd.hpp">
<Filter>c4\std</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\std\tuple.hpp">
<Filter>c4\std</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\std\vector.hpp">
<Filter>c4\std</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\std\vector_fwd.hpp">
<Filter>c4\std</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\substr.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\substr_fwd.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\szconv.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\time.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\type_name.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\types.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\unrestrict.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\utf.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\windows.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\windows_pop.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\windows_push.hpp">
<Filter>c4</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\debugbreak\debugbreak.h">
<Filter>c4\ext\debugbreak</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\rng\rng.hpp">
<Filter>c4\ext\rng</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\sg14\inplace_function.h">
<Filter>c4\ext\sg14</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\fast_float.hpp">
<Filter>c4\ext</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\ascii_number.h">
<Filter>c4\ext\fast_float\include\fast_float</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\bigint.h">
<Filter>c4\ext\fast_float\include\fast_float</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\decimal_to_binary.h">
<Filter>c4\ext\fast_float\include\fast_float</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\digit_comparison.h">
<Filter>c4\ext\fast_float\include\fast_float</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\fast_float.h">
<Filter>c4\ext\fast_float\include\fast_float</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\fast_table.h">
<Filter>c4\ext\fast_float\include\fast_float</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\float_common.h">
<Filter>c4\ext\fast_float\include\fast_float</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\parse_number.h">
<Filter>c4\ext\fast_float\include\fast_float</Filter>
</ClInclude>
<ClInclude Include="ext\c4core\src\c4\ext\fast_float\include\fast_float\simple_decimal_conversion.h">
<Filter>c4\ext\fast_float\include\fast_float</Filter>
</ClInclude>
</ItemGroup>
<ItemGroup>
<Natvis Include="ext\c4core\src\c4\c4core.natvis">
<Filter>c4</Filter>
</Natvis>
</ItemGroup>
<ItemGroup>
<Filter Include="c4">
<UniqueIdentifier>{D96EB7E9-E07F-31EF-8A8C-CDA60AEE417F}</UniqueIdentifier>
</Filter>
<Filter Include="c4\ext">
<UniqueIdentifier>{0C8CED5D-8C03-3DC1-88A0-4532154293B9}</UniqueIdentifier>
</Filter>
<Filter Include="c4\ext\debugbreak">
<UniqueIdentifier>{518E0C6D-105B-39F9-8193-356D9B7641CB}</UniqueIdentifier>
</Filter>
<Filter Include="c4\ext\fast_float">
<UniqueIdentifier>{73394C47-97A1-34EE-A081-D6003F63E488}</UniqueIdentifier>
</Filter>
<Filter Include="c4\ext\fast_float\include">
<UniqueIdentifier>{1B43ABC9-6254-3DF0-A9A5-86BCF6FEA126}</UniqueIdentifier>
</Filter>
<Filter Include="c4\ext\fast_float\include\fast_float">
<UniqueIdentifier>{6328648E-A574-3A0D-8E0E-A2AA13716FDA}</UniqueIdentifier>
</Filter>
<Filter Include="c4\ext\rng">
<UniqueIdentifier>{2A97DBBA-1057-3E33-AE1C-C4CF7DBD4D89}</UniqueIdentifier>
</Filter>
<Filter Include="c4\ext\sg14">
<UniqueIdentifier>{4104B22C-27F9-38F8-9E20-00B05D09BD39}</UniqueIdentifier>
</Filter>
<Filter Include="c4\std">
<UniqueIdentifier>{599BE36A-381B-3D35-9752-B177B3AA1E4D}</UniqueIdentifier>
</Filter>
</ItemGroup>
</Project>

3rdparty/rapidyaml/changelog/0.1.0.md vendored Normal file

@@ -0,0 +1,44 @@
This is the first ryml release. Future releases will have a more organized changelog; for now, only recent major changes are listed.
Please be aware that there are still some anticipated breaking changes in the API before releasing the 1.0 major version. These are highlighted in [the repo ROADMAP](https://github.com/biojppm/rapidyaml/blob/v0.1.0/ROADMAP.md).
* 2020/October
* [MR#89](https://github.com/biojppm/rapidyaml/pull/89):
* fix python API generation in windows
* use github actions for testing and releasing
* [MR#88](https://github.com/biojppm/rapidyaml/pull/88): [fix MacOS compilation and installs](https://github.com/biojppm/rapidyaml/issues/75). This is a fix from [c4core](https://github.com/biojppm/cmake/issues/1).
* [MR#88](https://github.com/biojppm/rapidyaml/pull/88): [fix boolean handling](https://github.com/biojppm/rapidyaml/issues/74). This is a fix from [c4core](https://github.com/biojppm/c4core/pull/18/). `true` and `false` are now parsed correctly into `bool` variables:
```c++
auto tree = parse("{foo: true, bar: false}");
```
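A minimal sketch of reading those values back (not part of the original note; it assumes the usual `>>` deserialization operator on nodes):
```c++
bool foo = false, bar = true;
tree["foo"] >> foo;   // foo == true
tree["bar"] >> bar;   // bar == false
```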
Emitting `bool` variables still defaults to `0`/`1`, like the default behaviour in the STL. To explicitly request `true`/`false` use `c4::fmt::boolalpha()`:
```c++
node << var; // "1" or "0"
node << c4::fmt::boolalpha(var); // "true" or "false"
```
* 2020/September
* [***Breaking change***] [MR#85](https://github.com/biojppm/rapidyaml/pull/85) null values in YAML are now parsed to null strings instead of YAML null token "~":
```c++
auto tree = parse("{foo: , bar: ''}");
// previous:
assert(tree["foo"].val() == "~");
assert(tree["bar"].val() == "");
// now:
assert(tree["foo"].val() == nullptr); // notice that this is now null
assert(tree["bar"].val() == "");
```
* [MR#85](https://github.com/biojppm/rapidyaml/pull/85) Commas after tags are now allowed:
```yaml
{foo: !!str, bar: ''} # now the comma does not cause an error
```
* [MR#81](https://github.com/biojppm/rapidyaml/pull/81): Always compile with extra pedantic warnings.
* 2020/May
* [***Breaking change***] the error callback now receives a source location object:
```c++
// previous
using pfn_error = void (*)(const char* msg, size_t msg_len, void *user_data);
// now:
using pfn_error = void (*)(const char* msg, size_t msg_len, Location location, void *user_data);
```
* Parser fixes to improve test suite success: [MR#73](https://github.com/biojppm/rapidyaml/pull/73), [MR#71](https://github.com/biojppm/rapidyaml/pull/71), [MR#68](https://github.com/biojppm/rapidyaml/pull/68), [MR#67](https://github.com/biojppm/rapidyaml/pull/67), [MR#66](https://github.com/biojppm/rapidyaml/pull/66)
* Fix compilation as DLL on windows [MR#69](https://github.com/biojppm/rapidyaml/pull/69)

3rdparty/rapidyaml/changelog/0.2.0.md vendored Normal file

@@ -0,0 +1,29 @@
### New features & improvements
- Enable parsing into nested nodes ([87f4184](https://github.com/biojppm/rapidyaml/commit/87f4184))
- `as_json()` can now be called with tree and node id ([4c23041](https://github.com/biojppm/rapidyaml/commit/4c23041)); see the sketch after this list
- Add `Parser::reserve_stack()` ([f31fb9f](https://github.com/biojppm/rapidyaml/commit/f31fb9f))
- Add uninstall target ([PR #122](https://github.com/biojppm/rapidyaml/pull/122))
- Update [c4core](https://github.com/biojppm/c4core) to v0.1.1
- Add a [quickstart sample](samples/quickstart.cpp) with build examples.
- Update [README.md](README.md) to refer to the quickstart
- Add [gdb visualizers](src/ryml-gdbtypes.py)
- Add `SO_VERSION` to shared builds
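A minimal sketch of the new `as_json()` overload mentioned above (not from the original changelog; the include set and the stream-operator usage are assumptions):
```c++
#include <iostream>
#include <ryml.hpp>
#include <ryml_std.hpp>

int main()
{
    ryml::Tree tree = ryml::parse("{foo: {bar: baz}, qux: [1, 2]}");
    std::cout << ryml::as_json(tree) << "\n";                    // whole tree as JSON
    std::cout << ryml::as_json(tree, tree["foo"].id()) << "\n";  // only the "foo" subtree
    return 0;
}
```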
### Fixes
- Fix [#139](https://github.com/biojppm/rapidyaml/issues/139): substr and csubstr not found in ryml namespace
- Fix [#131](https://github.com/biojppm/rapidyaml/issues/131): resolve references to map keys
- Fix [#129](https://github.com/biojppm/rapidyaml/issues/129): quoted strings starting with * parsed as references
- Fix [#128](https://github.com/biojppm/rapidyaml/issues/128): segfault on nonexistent anchor
- Fix [#124](https://github.com/biojppm/rapidyaml/issues/124): parse failure in comments with trailing colon
- Fix [#121](https://github.com/biojppm/rapidyaml/issues/121): preserve quotes when emitting scalars
- Fix [#103](https://github.com/biojppm/rapidyaml/issues/103): ambiguous parsing of null/empty scalars
- Fix [#90](https://github.com/biojppm/rapidyaml/issues/90): CMAKE_CXX_STANDARD ignored
- Fix [#40](https://github.com/biojppm/rapidyaml/issues/40): quadratic complexity from use of `sscanf(%f)`
- Fix emitting json to streams ([dc6af83](https://github.com/biojppm/rapidyaml/commit/dc6af83))
- Set the global memory resource when setting global callbacks ([511cba0](https://github.com/biojppm/rapidyaml/commit/511cba0))
- Fix python packaging ([PR #102](https://github.com/biojppm/rapidyaml/pull/102))
### Special thanks
- @Gei0r
- @litghost
- @costashatz

3rdparty/rapidyaml/changelog/0.2.1.md vendored Normal file

@@ -0,0 +1,235 @@
This release is focused on bug fixes and compliance with the [YAML test suite](https://github.com/yaml/yaml-test-suite).
### Breaking changes
- Fix parsing behavior of root-level scalars: now these are parsed into a DOCVAL, not SEQ->VAL ([5ba0d56](https://github.com/biojppm/rapidyaml/pull/144/commits/5ba0d56904daef1509f0073695145c4835ab1b30), from [PR #144](https://github.com/biojppm/rapidyaml/pull/144)). Eg,
```yaml
---
this is a scalar
--- # previously this was parsed as
- this is a scalar
```
- Cleanup type predicate API ([PR #155](https://github.com/biojppm/rapidyaml/pull/155)):
- ensure all type predicates from `Tree` and `NodeRef` forward to the corresponding predicate in `NodeType`
- remove all type predicates and methods from `NodeData`; use the equivalent call from `Tree` or `NodeRef`. For example, for `is_map()`:
```c++
Tree t = parse("{foo: bar}");
size_t map_id = t.root_id();
NodeRef map = t.rootref();
t.get(map_id)->is_map(); // compile error: no longer exists
assert(t.is_map(map_id)); // OK
assert(map.is_map()); // OK
```
- Further cleanup to the type predicate API will be done in the future, especially around the `.has_*()` vs corresponding `.is_*()` naming scheme.
### New features & improvements
- `Tree::lookup_path_or_modify()`: add overload to graft existing branches ([PR #141](https://github.com/biojppm/rapidyaml/pull/141))
- Callbacks: improve test coverage ([PR #141](https://github.com/biojppm/rapidyaml/pull/141))
- [YAML test suite](https://github.com/yaml/yaml-test-suite) ([PR #144](https://github.com/biojppm/rapidyaml/pull/144), [PR #145](https://github.com/biojppm/rapidyaml/pull/145)): big progress towards compliance with the suite. There are still a number of existing problems, which are the subject of ongoing work. See the [list of current known failures](../test/test_suite/test_suite_parts.cpp) in the test suite file.
- Python wheels and source package are now [uploaded to PyPI](https://pypi.org/project/rapidyaml/) as part of the release process.
### Fixes
#### Anchors and references
- Fix resolving of nodes with keyref+valref ([PR #144](https://github.com/biojppm/rapidyaml/pull/144)): `{&a a: &b b, *b: *a}`
- Fix parsing of implicit scalars when tags are present ([PR #145](https://github.com/biojppm/rapidyaml/pull/145)):
```yaml
- &a # test case PW8X
- a
- &a : a
b: &b
- &c : &a
- ? &d
- ? &e
: &a
```
- Fix [#151](https://github.com/biojppm/rapidyaml/issues/151): scalars beginning with `*` or `&` or `<<` are now correctly quoted when emitting ([PR #156](https://github.com/biojppm/rapidyaml/pull/156)).
- Also from [PR #156](https://github.com/biojppm/rapidyaml/pull/156), map inheritance nodes like `<<: *anchor` or `<<: [*anchor1, *anchor2]` now have a `KEYREF` flag in their type (until a call to `Tree::resolve()`):
```c++
Tree tree = parse("{map: &anchor {foo: bar}, copy: {<<: *anchor}}");
assert(tree["copy"]["<<"].is_key_ref()); // previously this did not hold
assert(tree["copy"]["<<"].is_val_ref()); // ... but this did
```
#### Tags
- Fix parsing of tag dense maps and seqs ([PR #144](https://github.com/biojppm/rapidyaml/pull/144)):
```yaml
--- !!map {
k: !!seq [ a, !!str b],
j: !!seq
[ a, !!str b]
--- !!seq [
!!map { !!str k: v},
!!map { !!str ? k: v}
]
--- !!map
!!str foo: !!map # there was a parse error with the multiple tags
!!int 1: !!float 20.0
!!int 3: !!float 40.0
--- !!seq
- !!map
!!str k1: v1
!!str k2: v2
!!str k3: v3
```
#### Whitespace
- Fix parsing of double-quoted scalars with tabs ([PR #145](https://github.com/biojppm/rapidyaml/pull/145)):
```yaml
"This has a\ttab"
# is now correctly parsed as "This has a<TAB>tab"
```
- Fix filtering of leading and trailing whitespace within double-quoted scalars ([PR #145](https://github.com/biojppm/rapidyaml/pull/145)):
```yaml
# test case 4ZYM, 7A4E, TL85
"
<SPC><SPC>foo<SPC>
<SPC>
<SPC><TAB><SPC>bar
<SPC><SPC>baz
"
# is now correctly parsed as " foo\nbar\nbaz "
```
- Fix parsing of tabs within YAML tokens ([PR #145](https://github.com/biojppm/rapidyaml/pull/145)):
```yaml
---<TAB>scalar # test case K54U
---<TAB>{} # test case Q5MG
--- # test case DC7X
a: b<TAB>
seq:<TAB>
- a<TAB>
c: d<TAB>#X
```
- Fix parsing of flow-style maps with omitted values without any space ([PR #145](https://github.com/biojppm/rapidyaml/pull/145)):
```yaml
# test case 4ABK
- {foo: , bar: , baz: } # this was parsed correctly as {foo: ~, bar: ~, baz: ~}
- {foo:, bar:, baz:} # ... but this was parsed as {'foo:': , 'bar:': ~, 'baz:': ~}
```
#### Scalars
- Unescape forward slashes in double quoted string ([PR #145](https://github.com/biojppm/rapidyaml/pull/145)):
```yaml
--- escaped slash: "a\/b" # test case 3UYS
# is now parsed as:
--- escaped slash: "a/b"
```
- Fix filtering of indented regions in folded scalars ([PR #145](https://github.com/biojppm/rapidyaml/pull/145)):
```yaml
# test case 7T8X
- >
folded
line
next
line
* bullet
* list
* lines
last
line
```
is now correctly parsed as `\nfolded line\nnext line\n * bullet\n\n * list\n * lines\n\nlast line\n`.
- Fix parsing of special characters within plain scalars ([PR #145](https://github.com/biojppm/rapidyaml/pull/145)):
```yaml
# test case 3MYT
k:#foo
&a !t s
!t s
# now correctly parsed as "k:#foo &a !t s !t s"
```
- Fix parsing of comments after complex keys ([PR #145](https://github.com/biojppm/rapidyaml/pull/145)):
```yaml
# test case X8DW
? key
# comment
: value
# now correctly parsed as {key: value}
```
- Fix parsing of consecutive complex keys within maps ([PR #145](https://github.com/biojppm/rapidyaml/pull/145))
```yaml
# test case 7W2P, ZWK4
? a
? b
c:
? d
e:
# now correctly parsed as {a: ~, b: ~, c: ~, d: ~, e: ~}
```
- Fix [#152](https://github.com/biojppm/rapidyaml/issues/152): parse error with folded scalars that are the last in a container ([PR #157](https://github.com/biojppm/rapidyaml/pull/157)):
```yaml
exec:
command:
# before the fix, this folded scalar failed to parse
- |
exec pg_isready -U "dog" -d "dbname=dog" -h 127.0.0.1 -p 5432
parses: no
```
- Fix: documents consisting of a quoted scalar now retain the VALQUO flag ([PR #156](https://github.com/biojppm/rapidyaml/pull/156))
```c++
Tree tree = parse("'this is a quoted scalar'");
assert(tree.rootref().is_doc());
assert(tree.rootref().is_val());
assert(tree.rootref().is_val_quoted());
```
#### Document structure
- Empty docs are now parsed as a docval with a null node:
```yaml
--- # test cases 6XDY, 6ZKB, 9BXL, PUW8
---
---
```
is now parsed as
```yaml
--- ~
--- ~
--- ~
```
- Prevent creation of DOC nodes from stream-level comments or tags ([PR #145](https://github.com/biojppm/rapidyaml/pull/145)):
```yaml
!foo "bar"
...
# Global
%TAG ! tag:example.com,2000:app/
---
!foo "bar"
```
was parsed as
```yaml
---
!foo "bar"
---
# notice the empty doc in here
---
!foo "bar"
```
and it is now correctly parsed as
```yaml
---
!foo "bar"
---
!foo "bar"
```
(other than the known limitation that ryml does not do tag lookup).
#### General
- Fix [#147](https://github.com/biojppm/rapidyaml/issues/147): serialize/deserialize special float values `.nan`, `.inf`, `-.inf` ([PR #149](https://github.com/biojppm/rapidyaml/pull/149))
- Fix [#142](https://github.com/biojppm/rapidyaml/issues/142): `preprocess_json()`: ensure quoted ranges are skipped when slurping containers
- Ensure error macros expand to a single statement ([PR #141](https://github.com/biojppm/rapidyaml/pull/141))
- Update c4core to [0.1.4](https://github.com/biojppm/c4core/releases/tag/v0.1.4)
### Special thanks
- @Gei0r

3rdparty/rapidyaml/changelog/0.2.2.md vendored Normal file

@@ -0,0 +1 @@
Yank python package 0.2.1, was accidentally created while iterating the PyPI submission from the Github action. This release does not add any change, and is functionally the same as [0.2.1](https://github.com/biojppm/rapidyaml/releases/tag/v0.2.1).

3rdparty/rapidyaml/changelog/0.2.3.md vendored Normal file

@@ -0,0 +1,285 @@
This release is focused on bug fixes and compliance with the [YAML test suite](https://github.com/yaml/yaml-test-suite).
### New features
- Add support for CPU architectures aarch64, ppc64le, s390x.
- Update c4core to [0.1.7](https://github.com/biojppm/c4core/releases/tag/v0.1.7)
- `Tree` and `NodeRef`: add document getter `doc()` and `docref()`
```c++
Tree tree = parse(R"(---
doc0
---
doc1
)");
NodeRef stream = tree.rootref();
assert(stream.is_stream());
// tree.doc(i): get the index of the i-th doc node.
// Equivalent to tree.child(tree.root_id(), i)
assert(tree.doc(0) == 1u);
assert(tree.doc(1) == 2u);
// tree.docref(i), same as above, return NodeRef
assert(tree.docref(0).val() == "doc0");
assert(tree.docref(1).val() == "doc1");
// stream.doc(i), same as above, given NodeRef
assert(stream.doc(0).val() == "doc0");
assert(stream.doc(1).val() == "doc1");
```
### Fixes
- Fix compilation with `C4CORE_NO_FAST_FLOAT` ([PR #163](https://github.com/biojppm/rapidyaml/pull/163))
#### Flow maps
- Fix parse of multiline plain scalars inside flow maps ([PR #161](https://github.com/biojppm/rapidyaml/pull/161)):
```yaml
# test case UT92
# all parsed as "matches %": 20
- { matches
% : 20 }
- { matches
%: 20 }
- { matches
%:
20 }
```
#### Tags
- Fix parsing of tags followed by comments in sequences ([PR #161](https://github.com/biojppm/rapidyaml/pull/161)):
```yaml
# test case 735Y
- !!map # Block collection
foo : bar
```
#### Quoted scalars
- Fix filtering of tab characters in quoted scalars ([PR #161](https://github.com/biojppm/rapidyaml/pull/161)):
```yaml
---
# test case 5GBF
"Empty line
<TAB>
as a line feed"
# now correctly parsed as "Empty line\nas a line feed"
---
# test case PRH3
' 1st non-empty
<SPC>2nd non-empty<SPC>
<TAB>3rd non-empty '
# now correctly parsed as " 1st non-empty\n2nd non-empty 3rd non-empty "
```
- Fix filtering of backslash characters in double-quoted scalars ([PR #161](https://github.com/biojppm/rapidyaml/pull/161)):
```yaml
# test cases NP9H, Q8AD
"folded<SPC>
to a space,<TAB>
<SPC>
to a line feed, or <TAB>\
\ <TAB>non-content"
# now correctly parsed as "folded to a space,\nto a line feed, or \t \tnon-content"
```
- Ensure filtering of multiline quoted scalars ([PR #161](https://github.com/biojppm/rapidyaml/pull/161)):
```yaml
# all scalars now correctly parsed as "quoted string",
# both for double and single quotes
---
"quoted
string"
--- "quoted
string"
---
- "quoted
string"
---
- "quoted
string"
---
"quoted
string": "quoted
string"
---
"quoted
string": "quoted
string"
```
#### Block scalars
- Ensure no newlines are added when emitting block scalars ([PR #161](https://github.com/biojppm/rapidyaml/pull/161))
- Fix parsing of block spec with both chomping and indentation: chomping may come before or after the indentation ([PR #161](https://github.com/biojppm/rapidyaml/pull/161)):
```yaml
# the block scalar specs below now have the same effect.
# test cases: D83L, P2AD
- |2-
explicit indent and chomp
- |-2
chomp and explicit indent
```
- Fix [inference of block indentation](https://yaml.org/spec/1.2.2/#8111-block-indentation-indicator) with leading blank lines ([PR #161](https://github.com/biojppm/rapidyaml/pull/161)):
```yaml
# test cases: 4QFQ, 7T8X
- >
# child1
# parsed as "\n\n child1"
--- # test case DWX9
|
literal
text
# Comment
# parsed as "\n\nliteral\n \n\ntext\n"
```
- Fix parsing of same-indentation block scalars ([PR #161](https://github.com/biojppm/rapidyaml/pull/161)):
```yaml
# test case W4TN
# all docs have the same value: "%!PS-Adobe-2.0"
--- |
%!PS-Adobe-2.0
...
--- >
%!PS-Adobe-2.0
...
--- |
%!PS-Adobe-2.0
...
--- >
%!PS-Adobe-2.0
...
--- |
%!PS-Adobe-2.0
--- >
%!PS-Adobe-2.0
--- |
%!PS-Adobe-2.0
--- >
%!PS-Adobe-2.0
```
- Folded block scalars: fix folding of newlines at the border of indented parts ([PR #161](https://github.com/biojppm/rapidyaml/pull/161)):
```yaml
# test case 6VJK
# now correctly parsed as "Sammy Sosa completed another fine season with great stats.\n\n 63 Home Runs\n 0.288 Batting Average\n\nWhat a year!\n"
>
Sammy Sosa completed another
fine season with great stats.
63 Home Runs
0.288 Batting Average
What a year!
---
# test case MJS9
# now correctly parsed as "foo \n\n \t bar\n\nbaz\n"
>
foo<SPC>
<SPC>
<SPC><TAB><SPC>bar
baz
```
- Folded block scalars: fix folding of newlines when the indented part is at the beginning of the scalar ([PR #161](https://github.com/biojppm/rapidyaml/pull/161)):
```yaml
# test case F6MC
a: >2
more indented
regular
# parsed as a: " more indented\nregular\n"
b: >2
more indented
regular
# parsed as b: "\n\n more indented\nregular\n"
```
#### Plain scalars
- Fix parsing of whitespace within plain scalars ([PR #161](https://github.com/biojppm/rapidyaml/pull/161)):
```yaml
---
# test case NB6Z
key:
value
with
tabs
tabs
foo
bar
baz
# is now correctly parsed as "value with\ntabs tabs\nfoo\nbar baz"
---
# test case 9YRD, EX5H (trailing whitespace)
a
b
c
d
e
# is now correctly parsed as "a b c d\ne"
```
- Fix parsing of unindented plain scalars at the root level scope ([PR #161](https://github.com/biojppm/rapidyaml/pull/161))
```yaml
--- # this parsed
Bare
scalar
is indented
# was correctly parsed as "Bare scalar is indented"
--- # but this failed to parse successfully:
Bare
scalar
is not indented
# is now correctly parsed as "Bare scalar is not indented"
--- # test case NB6Z
value
with
tabs
tabs
foo
bar
baz
# now correctly parsed as "value with\ntabs tabs\nfoo\nbar baz"
---
--- # test cases EXG3, 82AN
---word1
word2
# now correctly parsed as "---word1 word2"
```
- Fix parsing of comments within plain scalars
```yaml
# test case 7TMG
--- # now correctly parsed as "word1"
word1
# comment
--- # now correctly parsed as [word1, word2]
[ word1
# comment
, word2]
```
#### Python API
- Add missing node predicates in SWIG API definition ([PR #166](https://github.com/biojppm/rapidyaml/pull/166)):
- `is_anchor_or_ref()`
- `is_key_quoted()`
- `is_val_quoted()`
- `is_quoted()`
### Thanks
- @mbs-c
- @simu
- @QuellaZhang

3rdparty/rapidyaml/changelog/0.3.0.md vendored Normal file

@@ -0,0 +1,104 @@
### Breaking changes
Despite ryml being still in a non-stable 0.x.y version, considerable effort goes into trying to avoid breaking changes. However, this release has to collect on the [semantic versioning](https://semver.org/) prerogative for breaking changes. This is a needed improvement, so sorry for any nuisance!
**The allocation and error callback logic was revamped** on the [amalgamation PR](https://github.com/biojppm/rapidyaml/pull/172). Now trees and parsers receive (and store) a full `ryml::Callbacks` object instead of the (now removed) `ryml::Allocator` which had a pointer to a (now removed) `ryml::MemoryResourceCallbacks`, which was a (now removed) `ryml::MemoryResource`. To be clear, the `Callbacks` class is unchanged, other than removing some unneeded helper methods.
These changes were motivated by unfortunate name clashes between `c4::Allocator/ryml::Allocator` and `c4::MemoryResource/ryml::MemoryResource`, occurring if `<c4/allocator.hpp>` or `<c4/memory_resource.hpp>` were included before `<c4/yml/common.hpp>`. They also significantly simplify this part of the API, making it much easier to understand.
As a consequence of the above changes, the global memory resource getters and setters for ryml were also removed: `ryml::get_memory_resource()/ryml::set_memory_resource()`.
Here's an example of the required changes in client code. First the old client code (from the quickstart):
```c++
struct PerTreeMemoryExample : public ryml::MemoryResource
{
void *allocate(size_t len, void * hint) override;
void free(void *mem, size_t len) override;
};
PerTreeMemoryExample mrp;
PerTreeMemoryExample mr1;
PerTreeMemoryExample mr2;
ryml::Parser parser = {ryml::Allocator(&mrp)};
ryml::Tree tree1 = {ryml::Allocator(&mr1)};
ryml::Tree tree2 = {ryml::Allocator(&mr2)};
```
Should now be rewritten to:
```c++
struct PerTreeMemoryExample
{
ryml::Callbacks callbacks() const; // helper to create the callbacks
};
PerTreeMemoryExample mrp;
PerTreeMemoryExample mr1;
PerTreeMemoryExample mr2;
ryml::Parser parser = {mrp.callbacks()};
ryml::Tree tree1 = {mr1.callbacks()};
ryml::Tree tree2 = {mr2.callbacks()};
```
### New features
- Add amalgamation into a single header file ([PR #172](https://github.com/biojppm/rapidyaml/pull/172)):
- The amalgamated header will be available together with the deliverables from each release.
- To generate the amalgamated header:
```console
$ python tools/amalgamate.py ryml_all.hpp
```
- To use the amalgamated header:
- Include at will in any header of your project.
- In one - and only one - of your project source files, `#define RYML_SINGLE_HDR_DEFINE_NOW` and then `#include <ryml_all.hpp>`. This will enable the function and class definitions in the header file. For example, here's a sample program:
```c++
#include <iostream>
#define RYML_SINGLE_HDR_DEFINE_NOW // do this before the include
#include <ryml_all.hpp>
int main()
{
auto tree = ryml::parse("{foo: bar}");
std::cout << tree["foo"].val() << "\n";
}
```
- Add `Tree::change_type()` and `NodeRef::change_type()` ([PR #171](https://github.com/biojppm/rapidyaml/pull/171)):
```c++
// clears a node and sets its type to a different type (one of `VAL`, `SEQ`, `MAP`):
Tree t = parse("{keyval0: val0, keyval1: val1, keyval2: val2}");
t[0].change_type(VAL);
t[1].change_type(MAP);
t[2].change_type(SEQ);
Tree expected = parse("{keyval0: val0, keyval1: {}, keyval2: []}");
assert(emitrs<std::string>(t) == emitrs<std::string>(expected));
```
- Add support for compilation with emscripten (WebAssembly+javascript) ([PR #176](https://github.com/biojppm/rapidyaml/pull/176)).
### Fixes
- Take block literal indentation as relative to current indentation level, rather than as an absolute indentation level ([PR #178](https://github.com/biojppm/rapidyaml/pull/178)):
```yaml
foo:
- |
child0
- |2
child2 # indentation is 4, not 2
```
- Fix parsing when seq member maps start without a key ([PR #178](https://github.com/biojppm/rapidyaml/pull/178)):
```yaml
# previously this resulted in a parse error
- - : empty key
- - : another empty key
```
- Prefer passing `substr` and `csubstr` by value instead of const reference ([PR #171](https://github.com/biojppm/rapidyaml/pull/171))
- Fix [#173](https://github.com/biojppm/rapidyaml/issues/173): add alias target `ryml::ryml` ([PR #174](https://github.com/biojppm/rapidyaml/pull/174))
- Speedup compilation of tests by removing linking with yaml-cpp and libyaml. ([PR #177](https://github.com/biojppm/rapidyaml/pull/177))
- Fix [c4core#53](https://github.com/biojppm/c4core/issues/53): cmake install targets were missing call to `export()` ([PR #179](https://github.com/biojppm/c4core/pull/179)).
- Add missing export to `Tree` ([PR #181](https://github.com/biojppm/c4core/pull/181)).
### Thanks
- @aviktorov

3rdparty/rapidyaml/changelog/0.4.0.md vendored Normal file

@@ -0,0 +1,229 @@
This release improves compliance with the [YAML test suite](https://github.com/yaml/yaml-test-suite/) (thanks @ingydotnet and @perlpunk for extensive and helpful cooperation), and adds node location tracking using the parser.
### Breaking changes
As part of the [new feature to track source locations](https://github.com/biojppm/rapidyaml/pull/168), opportunity was taken to address a number of pre-existing API issues. These changes consisted of:
- Deprecate `c4::yml::parse()` and `c4::yml::Parser::parse()` overloads; all these functions will be removed in short order. Until removal, any call from client code will trigger a compiler warning.
- Add `parse()` alternatives, either `parse_in_place()` or `parse_in_arena()`:
- `parse_in_place()` receives only `substr` buffers, ie mutable YAML source buffers. Trying to pass a `csubstr` buffer to `parse_in_place()` will cause a compile error:
```c++
substr readwrite = /*...*/;
Tree tree = parse_in_place(readwrite); // OK
csubstr readonly = /*...*/;
Tree tree = parse_in_place(readonly); // compile error
```
- `parse_in_arena()` receives only `csubstr` buffers, ie immutable YAML source buffers. Prior to parsing, the buffer is copied to the tree's arena, then the copy is parsed in place. Because `parse_in_arena()` is meant for immutable buffers, overloads receiving a `substr` YAML buffer are now declared but marked deprecated, and intentionally left undefined, such that calling `parse_in_arena()` with a `substr` will cause a linker error as well as a compiler warning.
```c++
substr readwrite = /*...*/;
Tree tree = parse_in_arena(readwrite); // compile warning+linker error
```
This is to prevent an accidental extra copy of the mutable source buffer to the tree's arena: `substr` is implicitly convertible to `csubstr`. If you really intend to parse an originally mutable buffer in the tree's arena, convert it first explicitly to immutable by assigning the `substr` to a `csubstr` prior to calling `parse_in_arena()`:
```c++
substr readwrite = /*...*/;
csubstr readonly = readwrite; // ok
Tree tree = parse_in_arena(readonly); // ok
```
This problem does not occur with `parse_in_place()` because `csubstr` is not implicitly convertible to `substr`.
- In the python API, `ryml.parse()` was removed and not just deprecated; `parse_in_arena()` and `parse_in_place()` now replace it.
- `Callbacks`: changed behavior in `Parser` and `Tree`:
- When a tree is copy-constructed or move-constructed to another, the receiving tree will start with the callbacks of the original.
- When a tree is copy-assigned or move-assigned to another, the receiving tree will now change its callbacks to the original.
- When a parser creates a new tree, the tree will now use a copy of the parser's callbacks object.
- When an existing tree is given directly to the parser, both the tree and the parser now retain their own callback objects; any allocation or error during parsing will go through the respective callback object.
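A minimal sketch of the parser-to-tree rule (not from the original changelog; `Tree::callbacks()` and the default-constructed `Callbacks` object are assumptions, and the equality check relies on the `Callbacks` `operator==()` added in this release):
```c++
ryml::Parser parser = {ryml::Callbacks{}};   // parser stores a full Callbacks object (assumed default-constructible)
ryml::Tree tree = parser.parse_in_arena("inline.yml", "{foo: bar}");
// the tree created by the parser starts with a copy of the parser's callbacks:
assert(tree.callbacks() == parser.callbacks());
```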
### New features
- Add tracking of source code locations. This is useful for reporting semantic errors after the parsing phase (ie where the YAML is syntactically valid and parsing is successful, but the tree contents are semantically invalid). The locations can be obtained lazily from the parser when the first location is queried:
```c++
// To obtain locations, use of the parser is needed:
ryml::Parser parser;
ryml::Tree tree = parser.parse_in_arena("source.yml", R"({
aa: contents,
foo: [one, [two, three]]
})");
// After parsing, on the first call to obtain a location,
// the parser will cache a lookup structure to accelerate
// tracking the location of a node, with complexity
// O(numchars(srcbuffer)). Then it will do the lookup, with
// complexity O(log(numlines(srcbuffer))).
ryml::Location loc = parser.location(tree.rootref());
assert(parser.location_contents(loc).begins_with("{"));
// note the location members are zero-based:
assert(loc.offset == 0u);
assert(loc.line == 0u);
assert(loc.col == 0u);
// On the next call to location(), the accelerator is reused
// and only the lookup is done.
loc = parser.location(tree["aa"]);
assert(parser.location_contents(loc).begins_with("aa"));
assert(loc.offset == 2u);
assert(loc.line == 1u);
assert(loc.col == 0u);
// KEYSEQ in flow style: points at the key
loc = parser.location(tree["foo"]);
assert(parser.location_contents(loc).begins_with("foo"));
assert(loc.offset == 16u);
assert(loc.line == 2u);
assert(loc.col == 0u);
loc = parser.location(tree["foo"][0]);
assert(parser.location_contents(loc).begins_with("one"));
assert(loc.line == 2u);
assert(loc.col == 6u);
// SEQ in flow style: location points at the opening '[' (there's no key)
loc = parser.location(tree["foo"][1]);
assert(parser.location_contents(loc).begins_with("["));
assert(loc.line == 2u);
assert(loc.col == 11u);
loc = parser.location(tree["foo"][1][0]);
assert(parser.location_contents(loc).begins_with("two"));
assert(loc.line == 2u);
assert(loc.col == 12u);
loc = parser.location(tree["foo"][1][1]);
assert(parser.location_contents(loc).begins_with("three"));
assert(loc.line == 2u);
assert(loc.col == 17u);
// NOTE: reusing the parser with a new YAML source buffer
// will invalidate the accelerator.
```
See more details in the [quickstart sample](https://github.com/biojppm/rapidyaml/blob/bfb073265abf8c58bbeeeed7fb43270e9205c71c/samples/quickstart.cpp#L3759). Thanks to @cschreib for submitting a working example proving how simple it could be to achieve this.
- `Parser`:
- add `source()` and `filename()` to get the latest buffer and filename to be parsed
- add `callbacks()` to get the parser's callbacks
- Add `from_tag_long()` and `normalize_tag_long()`:
```c++
assert(from_tag_long(TAG_MAP) == "<tag:yaml.org,2002:map>");
assert(normalize_tag_long("!!map") == "<tag:yaml.org,2002:map>");
```
- Add an experimental API to resolve tags based on the tree's tag directives. This API is still immature and will likely be subject to changes, so we won't document it yet.
- Regarding emit styles (see issue [#37](https://github.com/biojppm/rapidyaml/issues/37)): add an experimental API to force flow/block style on container nodes, as well as block-literal/block-folded/double-quoted/single-quoted/plain styles on scalar nodes. This API is also immature and will likely be subject to changes, so we won't document it yet. But if you are desperate for this functionality, the new facilities will let you go further.
- Add preliminary support for bare-metal ARM architectures, with CI tests pending implementation of QEMU action. ([#193](https://github.com/biojppm/rapidyaml/issues/193), [c4core#63](https://github.com/biojppm/c4core/issues/63)).
- Add preliminary support for RISC-V architectures, with CI tests pending availability of RISC-V based github actions. ([c4core#69](https://github.com/biojppm/c4core/pulls/69)).
### Fixes
- Fix edge cases of parsing of explicit keys (ie keys after `?`) ([PR#212](https://github.com/biojppm/rapidyaml/pulls/212)):
```yaml
# all these were fixed:
? : # empty
? explicit key # this comment was not parsed correctly
? # trailing empty key was not added to the map
```
- Fixed parsing of tabs used as whitespace tokens after `:` or `-`. This feature [is costly (see some benchmark results here)](https://github.com/biojppm/rapidyaml/pull/211#issuecomment-1030688035) and thus it is disabled by default, and requires defining a macro or cmake option `RYML_WITH_TAB_TOKENS` to enable ([PR#211](https://github.com/biojppm/rapidyaml/pulls/211)).
- Allow tab indentation in flow seqs ([PR#215](https://github.com/biojppm/rapidyaml/pulls/215)) (6CA3).
- ryml now parses successfully compact JSON code `{"like":"this"}` without any need for preprocessing. This code was not valid YAML 1.1, but was made valid in YAML 1.2. So the `preprocess_json()` functions, used to insert spaces after `:` are no longer necessary and have been removed. If you were using these functions, remove the calls and just pass the original source directly to ryml's parser ([PR#210](https://github.com/biojppm/rapidyaml/pulls/210)).
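For illustration (not part of the original entry), the compact form now parses directly:
```c++
ryml::Tree t = ryml::parse_in_arena(R"({"like":"this"})");
assert(t["like"].val() == "this");
```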
- Fix handling of indentation when parsing block scalars ([PR#210](https://github.com/biojppm/rapidyaml/pulls/210)):
```yaml
---
|
hello
there
---
|
ciao
qua
---
- |
hello
there
- |
ciao
qua
---
foo: |
hello
there
bar: |
ciao
qua
```
- Fix parsing of maps when opening a scope with whitespace before the colon ([PR#210](https://github.com/biojppm/rapidyaml/pulls/210)):
```yaml
foo0 : bar
---
foo1 : bar # the " :" was causing an assert
---
foo2 : bar
---
foo3 : bar
---
foo4 : bar
```
- Ensure container keys preserve quote flags when the key is quoted ([PR#210](https://github.com/biojppm/rapidyaml/pulls/210)).
- Ensure scalars beginning with `%` are emitted with quotes ([PR#216](https://github.com/biojppm/rapidyaml/pulls/216)).
- Fix [#203](https://github.com/biojppm/rapidyaml/issues/203): when parsing, do not convert `null` or `~` to null scalar strings. Now the scalar strings contain the verbatim contents of the original scalar; to query whether a scalar value is null, use `Tree::key_is_null()/val_is_null()` and `NodeRef::key_is_null()/val_is_null()`, which return true if it is empty or any of the unquoted strings `~`, `null`, `Null`, or `NULL` ([PR#207](https://github.com/biojppm/rapidyaml/pulls/207)).
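A minimal sketch of the new null queries (not part of the original entry; the example document is illustrative):
```c++
ryml::Tree tree = ryml::parse_in_arena("{a: ~, b: null, c: , d: nonnull}");
assert(tree["a"].val() == "~");     // the verbatim scalar is preserved
assert(tree["a"].val_is_null());
assert(tree["b"].val_is_null());
assert(tree["c"].val_is_null());    // empty scalars also count as null
assert(!tree["d"].val_is_null());
```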
- Fix [#205](https://github.com/biojppm/rapidyaml/issues/205): fix parsing of escaped characters in double-quoted strings: `"\\\"\n\r\t\<TAB>\/\<SPC>\0\b\f\a\v\e\_\N\L\P"` ([PR#207](https://github.com/biojppm/rapidyaml/pulls/207)).
- Fix [#204](https://github.com/biojppm/rapidyaml/issues/204): add decoding of unicode codepoints `\x` `\u` `\U` in double-quoted scalars:
```c++
Tree tree = parse_in_arena(R"(["\u263A \xE2\x98\xBA \u2705 \U0001D11E"])");
assert(tree[0].val() == "☺ ☺ ✅ 𝄞");
```
This is mandated by the YAML standard and was missing from ryml ([PR#207](https://github.com/biojppm/rapidyaml/pulls/207)).
- Fix emission of nested nodes which are sequences: when these are given as the emit root, the `- ` from the parent node was added ([PR#210](https://github.com/biojppm/rapidyaml/pulls/210)):
```c++
const ryml::Tree tree = ryml::parse_in_arena(R"(
- - Rochefort 10
- Busch
- Leffe Rituel
- - and so
- many other
- wonderful beers
)");
// before (error), YAML valid but not expected
//assert(ryml::emitrs<std::string>(tree[0][3]) == R"(- - and so
// - many other
// - wonderful beers
//)");
// now: YAML valid and expected
assert(ryml::emitrs<std::string>(tree[0][3]) == R"(- and so
- many other
- wonderful beers
)");
```
- Fix parsing of isolated `!`: should be an empty val tagged with `!` (UKK06-02) ([PR#215](https://github.com/biojppm/rapidyaml/pulls/215)).
- Fix [#193](https://github.com/biojppm/rapidyaml/issues/193): amalgamated header missing `#include <stdarg.h>` which prevented compilation in bare-metal `arm-none-eabi` ([PR #195](https://github.com/biojppm/rapidyaml/pull/195), requiring also [c4core #64](https://github.com/biojppm/c4core/pull/64)).
- Accept `infinity`, `inf` and `nan` as special float values (but not mixed case: eg `InFiNiTy` or `Inf` or `NaN` are not accepted) ([PR #186](https://github.com/biojppm/rapidyaml/pull/186)).
- Accept special float values with upper or mixed case: `.Inf`, `.INF`, `.NaN`, `.NAN`. Previously, only low-case `.inf` and `.nan` were accepted ([PR #186](https://github.com/biojppm/rapidyaml/pull/186)).
- Accept `null` with upper or mixed case: `Null` or `NULL`. Previously, only low-case `null` was accepted ([PR #186](https://github.com/biojppm/rapidyaml/pull/186)).
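A minimal sketch of the special-value round trip described in the three bullets above (not from the original changelog; the example values and the `>>` deserialization into `float` are illustrative assumptions):
```c++
ryml::Tree t = ryml::parse_in_arena("[.Inf, -.inf, .NaN, Null]");
float a = 0.f, b = 0.f, c = 0.f;
t[0] >> a;
t[1] >> b;
t[2] >> c;
assert(std::isinf(a) && a > 0.f);
assert(std::isinf(b) && b < 0.f);
assert(std::isnan(c));
assert(t[3].val_is_null());   // "Null" is now accepted as a null value
```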
- Fix [#182](https://github.com/biojppm/rapidyaml/issues/182): add missing export of DLL symbols, and document requirements for compiling shared library from the amalgamated header. [PR #183](https://github.com/biojppm/rapidyaml/pull/183), also [PR c4core#56](https://github.com/biojppm/c4core/pull/56) and [PR c4core#57](https://github.com/biojppm/c4core/pull/57).
- Fix [#185](https://github.com/biojppm/rapidyaml/issues/185): compilation failures in earlier Xcode versions ([PR #187](https://github.com/biojppm/rapidyaml/pull/187) and [PR c4core#61](https://github.com/biojppm/c4core/pull/61)):
- `c4/substr_fwd.hpp`: (failure in Xcode 12 and earlier) forward declaration for `std::allocator` is inside the `inline namespace __1`, unlike later versions.
- `c4/error.hpp`: (failure in debug mode in Xcode 11 and earlier) `__clang_major__` does not mean the same as in the common clang, and as a result the warning `-Wgnu-inline-cpp-without-extern` does not exist there.
- Ensure error messages do not wrap around the buffer when the YAML source line is too long ([PR#210](https://github.com/biojppm/rapidyaml/pulls/210)).
- Ensure error is emitted on unclosed flow sequence characters eg `[[[` ([PR#210](https://github.com/biojppm/rapidyaml/pulls/210)). Same thing for `[]]`.
- Refactor error message building and parser debug logging to use the new dump facilities in c4core ([PR#212](https://github.com/biojppm/rapidyaml/pulls/212)).
- Parse: fix read-after-free when duplicating a parser state node, when pushing to the stack requires a stack buffer resize ([PR#210](https://github.com/biojppm/rapidyaml/pulls/210)).
- Add support for legacy gcc 4.8 ([PR#217](https://github.com/biojppm/rapidyaml/pulls/217)).
### Improvements
- Rewrite filtering of scalars to improve parsing performance ([PR #188](https://github.com/biojppm/rapidyaml/pull/188)). Previously the scalar strings were filtered in place, which resulted in quadratic complexity in terms of scalar length. This did not matter for small scalars fitting the cache (which is the more frequent case), but grew in cost as the scalars grew larger. To achieve linearity, the code was changed so that the strings are now filtered to a temporary scratch space in the parser, and copied back to the output buffer after filtering, if any change occurred. The improvements were large for the folded scalars; the table below shows the benchmark results of throughput (MB/s) for several files containing large scalars of a single type:
| scalar type | before | after | improvement |
|:------------|-------:|-------:|---------:|
| block folded | 276 | 561 | 103% |
| block literal | 331 | 611 | 85% |
| single quoted | 247 | 267 | 8% |
| double quoted | 212 | 230 | 8% |
| plain (unquoted) | 173 | 186 | 8% |
The cost for small scalars is negligible, with benchmark improvement in the interval of -2% to 5%, so well within the margin of benchmark variability in a regular OS. In the future, this will be optimized again by copying each character in place, thus completely avoiding the staging arena.
- `Callbacks`: add `operator==()` and `operator!=()` ([PR #168](https://github.com/biojppm/rapidyaml/pull/168)).
- `Tree`: on error or assert prefer the error callback stored into the tree's current `Callbacks`, rather than the global `Callbacks` ([PR #168](https://github.com/biojppm/rapidyaml/pull/168)).
- `detail::stack<>`: improve behavior when assigning from objects with different `Callbacks`, and test all rule-of-5 scenarios ([PR #168](https://github.com/biojppm/rapidyaml/pull/168)).
- Improve formatting of error messages.
### Thanks
- @ingydotnet
- @perlpunk
- @cschreib
- @fargies
- @Xeonacid
- @aviktorov
- @xTVaser


3rdparty/rapidyaml/compat.cmake vendored Normal file

@@ -0,0 +1,11 @@
# old gcc-4.8 support
if((CMAKE_CXX_COMPILER_ID STREQUAL "GNU") AND
(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 4.8) AND
(CMAKE_CXX_COMPILER_VERSION VERSION_LESS 5.0))
# c++17 compiler required
set(C4RYML_BUILD_BENCHMARKS OFF CACHE BOOL "" FORCE)
# LLVM required
set(C4RYML_SANITIZE OFF CACHE BOOL "" FORCE)
endif()

@@ -0,0 +1,9 @@
#ifndef _C4_LOG_HPP_
#define _C4_LOG_HPP_
#include <stdio.h> // for fprintf()/printf() used by the macros below
// FIXME - these are just dumb placeholders
#define C4_LOGF_ERR(...) fprintf(stderr, __VA_ARGS__)
#define C4_LOGF_WARN(...) fprintf(stderr, __VA_ARGS__)
#define C4_LOGP(msg, ...) printf(msg)
#endif /* _C4_LOG_HPP_ */

File diff suppressed because it is too large.

@@ -0,0 +1,108 @@
sudo: required
dist: bionic
language: cpp
env:
global:
# cmake is installed into /usr/bin
- PATH=/usr/bin:/usr/local/bin:$PATH
# we're not using combination parameters here to ensure that the builds
# run in the order we want. (We want to perform the fastest tests first so
# failed tests appear as early as possible).
# NOTE: The compiler setting is unused. It simply makes the display on
# travis-ci.org more readable.
# WARNING: do not use the name CXX. Travis will ignore the value here.
matrix:
include:
# every entry does both 64 and 32 bit
# SAN := sanitizers
# VG := valgrind
# coverage: in bionic, lcov is incompatible with g++8 and later
- env: CXX_=g++-7 BT=Coverage STD=11
- env: CXX_=g++-7 BT=Coverage STD=14
- env: CXX_=g++-7 BT=Coverage STD=17
- env: CXX_=g++-10 BT=Debug STD=11 VG=ON
- env: CXX_=g++-10 BT=Release STD=11 VG=ON
- env: CXX_=g++-10 BT=Debug STD=14 VG=ON
- env: CXX_=g++-10 BT=Release STD=14 VG=ON
- env: CXX_=g++-10 BT=Debug STD=17 VG=ON
- env: CXX_=g++-10 BT=Release STD=17 VG=ON
- env: CXX_=g++-10 BT=Debug STD=20 VG=ON
- env: CXX_=g++-10 BT=Release STD=20 VG=ON
- env: CXX_=clang++-10 BT=Debug STD=11 SAN=ALL VG=ON
- env: CXX_=clang++-10 BT=Release STD=11 SAN=ALL VG=ON
- env: CXX_=clang++-10 BT=Debug STD=14 SAN=ALL VG=ON
- env: CXX_=clang++-10 BT=Release STD=14 SAN=ALL VG=ON
- env: CXX_=clang++-10 BT=Debug STD=17 SAN=ALL VG=ON
- env: CXX_=clang++-10 BT=Release STD=17 SAN=ALL VG=ON
- env: CXX_=clang++-10 BT=Debug STD=20 SAN=ALL VG=ON
- env: CXX_=clang++-10 BT=Release STD=20 SAN=ALL VG=ON
- env: CXX_=g++-9 BT=Debug
- env: CXX_=g++-9 BT=Release
- env: CXX_=clang++-9 BT=Debug
- env: CXX_=clang++-9 BT=Release
- env: CXX_=g++-8 BT=Debug
- env: CXX_=g++-8 BT=Release
- env: CXX_=clang++-8 BT=Debug
- env: CXX_=clang++-8 BT=Release
- env: CXX_=g++-7 BT=Debug
- env: CXX_=g++-7 BT=Release
- env: CXX_=clang++-7 BT=Debug
- env: CXX_=clang++-7 BT=Release
- env: CXX_=g++-6 BT=Debug
- env: CXX_=g++-6 BT=Release
- env: CXX_=clang++-6.0 BT=Debug
- env: CXX_=clang++-6.0 BT=Release
- env: CXX_=g++-5 BT=Debug
- env: CXX_=g++-5 BT=Release
- env: CXX_=clang++-5.0 BT=Debug
- env: CXX_=clang++-5.0 BT=Release
# gcc 4.9 is not available in 18.04 -- https://stackoverflow.com/questions/48398475/
#- env: CXX_=g++-4.9 BT=Debug
#- env: CXX_=g++-4.9 BT=Release
- env: CXX_=clang++-4.0 BT=Debug
- env: CXX_=clang++-4.0 BT=Release
- env: CXX_=clang++-3.9 BT=Debug
- env: CXX_=clang++-3.9 BT=Release
# ----------- clang-tidy
#
- env: CXX_=clang++-9 BT=Debug LINT=clang-tidy
- env: CXX_=clang++-9 BT=Release LINT=clang-tidy
install:
- bash -x .ci/travis-install.sh
script:
- source .ci/travis-setenv.sh
- c4core_cfg_test 64 dynamic
- c4core_run_test 64 dynamic
- c4core_cfg_test 64 static
- c4core_run_test 64 static
- c4core_cfg_test 32 static
- c4core_run_test 32 static
- echo "Success!"
after_success:
- source .ci/travis-setenv.sh
# coveralls only accepts one submission per job
#- c4core_submit_coverage 32 static coveralls
- c4core_submit_coverage 64 static coveralls
- c4core_submit_coverage 32 static codecov
- c4core_submit_coverage 64 static codecov

@@ -0,0 +1,82 @@
version: '{build}'
image: Visual Studio 2019
environment:
matrix:
- {GEN: Visual Studio 16 2019, ARCH: -A x64, CFG: Debug, compiler: msvc-16-seh}
- {GEN: Visual Studio 16 2019, ARCH: -A Win32, CFG: Debug, compiler: msvc-16-seh}
- {GEN: Visual Studio 16 2019, ARCH: -A x64, CFG: Release, compiler: msvc-16-seh}
- {GEN: Visual Studio 16 2019, ARCH: -A Win32, CFG: Release, compiler: msvc-16-seh}
- {GEN: Visual Studio 16 2019, ARCH: -A x64, STD: -D C4_CXX_STANDARD=20, CFG: Debug, compiler: msvc-16-seh}
- {GEN: Visual Studio 16 2019, ARCH: -A x64, STD: -D C4_CXX_STANDARD=17, CFG: Debug, compiler: msvc-16-seh}
- {GEN: Visual Studio 16 2019, ARCH: -A x64, STD: -D C4_CXX_STANDARD=14, CFG: Debug, compiler: msvc-16-seh}
- {GEN: Visual Studio 15 2017 Win64, CFG: Debug, APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017, compiler: msvc-15-seh}
- {GEN: Visual Studio 15 2017, CFG: Debug, APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017, compiler: msvc-15-seh}
- {GEN: Visual Studio 15 2017 Win64, CFG: Release, APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017, compiler: msvc-15-seh}
- {GEN: Visual Studio 15 2017, CFG: Release, APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017, compiler: msvc-15-seh}
#- compiler: gcc-5.3.0-posix
# GEN: "MinGW Makefiles"
# cxx_path: 'C:\mingw-w64\i686-5.3.0-posix-dwarf-rt_v4-rev0\mingw32\bin'
# APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
# CFG: Quicktest
# externconfig: Debug
matrix:
fast_finish: true
install:
- git submodule update --init --recursive
# git bash conflicts with MinGW makefiles
- set "PATH=%PATH:C:\Program Files\Git\usr\bin;=%"
- if not "%cxx_path%"=="" (set "PATH=%PATH%;%cxx_path%")
- cmake --version
build_script:
- echo %GEN%
- echo %ARCH%
- echo %CFG%
- echo %STD%
- set NUM_JOBS=3
- set PROJ_DIR=%cd%
- set BUILD_DIR=%PROJ_DIR%\build
- set INSTALL_DIR=%PROJ_DIR%\install
- set C4_EXTERN_DIR=%BUILD_DIR%\extern
- md %BUILD_DIR%
- md %BUILD_DIR%\static %BUILD_DIR%\shared %BUILD_DIR%\extern
- cmake -S %PROJ_DIR% -B %BUILD_DIR%\static %STD% -DC4CORE_DEV=ON -G "%GEN%" %ARCH% "-DCMAKE_BUILD_TYPE=%CFG%" "-DCMAKE_INSTALL_PREFIX=%INSTALL_DIR%\static" -DBUILD_SHARED_LIBS=OFF
- cmake -S %PROJ_DIR% -B %BUILD_DIR%\shared %STD% -DC4CORE_DEV=ON -G "%GEN%" %ARCH% "-DCMAKE_BUILD_TYPE=%CFG%" "-DCMAKE_INSTALL_PREFIX=%INSTALL_DIR%\shared" -DBUILD_SHARED_LIBS=ON
- cmake --build %BUILD_DIR%\static --config %configuration% --target test-build --parallel %NUM_JOBS%
- cmake --build %BUILD_DIR%\shared --config %configuration% --target test-build --parallel %NUM_JOBS%
test_script:
- cmake --build %BUILD_DIR%\static --config %configuration% --target test
- cmake --build %BUILD_DIR%\shared --config %configuration% --target test
#artifacts:
# - path: '_build/CMakeFiles/*.log'
# name: logs
# - path: '_build/Testing/**/*.xml'
# name: test_results
skip_commits:
files:
- .gitignore
- .travis*
- .ci/travis*
- .ci/dev_*
- .ci/show_*
- .ci/vagrant*
- .ci/Vagrant*
- bm/html/*
- doc/*
- img/*
- CHANGELOG.md
- CONTRIBUTING.md
- LICENSE.txt
- README.*
- ROADMAP.*

@@ -0,0 +1,129 @@
#!/bin/bash
# useful to iterate when fixing the release:
# ver=0.2.1 ; ( set -x ; git tag -d v$ver ; git push origin :v$ver ) ; (set -x ; set -e ; tbump --only-patch --non-interactive $ver ; git add -u ; git commit --amend --no-edit ; git tag --annotate --message "v$ver" "v$ver" ; git push -f --tags origin )
function c4_release_create()
{
( \
set -euxo pipefail ; \
ver=$(_c4_validate_ver $1) ; \
branch=$(_c4_validate_branch) ; \
c4_release_bump $ver ; \
c4_release_commit $ver $branch \
)
}
function c4_release_redo()
{
( \
set -euxo pipefail ; \
ver=$(_c4_validate_ver $1) ; \
branch=$(_c4_validate_branch) ; \
c4_release_delete $ver ; \
c4_release_bump $ver ; \
c4_release_amend $ver $branch \
)
}
function c4_release_bump()
{
( \
set -euxo pipefail ; \
ver=$(_c4_validate_ver $1) ; \
tbump --non-interactive --only-patch $ver \
)
}
function c4_release_commit()
{
( \
set -euxo pipefail ; \
ver=$(_c4_validate_ver $1) ; \
branch=$(_c4_validate_branch) ; \
tag=v$ver ; \
git add -u ; \
git commit -m $tag ; \
git tag --annotate --message $tag $tag ; \
)
}
function c4_release_amend()
{
( \
set -euxo pipefail ; \
ver=$(_c4_validate_ver $1) ; \
branch=$(_c4_validate_branch) ; \
tag=v$ver ; \
git add -u ; \
git commit --amend -m $tag ; \
git tag --annotate --message $tag $tag ; \
)
}
function c4_release_delete()
{
( \
set -euxo pipefail ; \
ver=$(_c4_validate_ver $1) ; \
git tag -d v$ver ; \
git push origin :v$ver \
)
}
function c4_release_push()
{
( \
set -euxo pipefail ; \
ver=$(_c4_validate_ver $1) ; \
branch=$(_c4_validate_branch) ; \
tag=v$ver ; \
git push origin $branch ; \
git push --tags origin $tag \
)
}
function c4_release_force_push()
{
( \
set -euxo pipefail ; \
ver=$(_c4_validate_ver $1) ; \
branch=$(_c4_validate_branch) ; \
tag=v$ver ; \
git push -f origin $branch ; \
git push -f --tags origin $tag \
)
}
function _c4_validate_ver()
{
ver=$1
if [ -z "$ver" ] ; then \
exit 1
fi
ver=$(echo $ver | sed "s:v\(.*\):\1:")
#sver=$(echo $ver | sed "s:\([0-9]*\.[0-9]*\..[0-9]*\).*:\1:")
if [ ! -f changelog/$ver.md ] ; then \
if [ -f changelog/current.md ] ; then
git mv changelog/current.md changelog/$ver.md
touch changelog/current.md
git add changelog/current.md
else
echo "ERROR: could not find changelog/$ver.md or changelog/current.md"
exit 1
fi
fi
echo $ver
}
function _c4_validate_branch()
{
branch=$(git rev-parse --abbrev-ref HEAD)
if [ "$branch" != "master" ] ; then
echo "ERROR: release branch must be master"
exit 1
fi
echo $branch
}

@@ -0,0 +1,306 @@
#!/usr/bin/env bash
set -x
# input environment variables:
# OS: the operating system
# CXX_: the compiler version. eg, g++-9 or clang++-6.0
# BT: the build type
# VG: whether to install valgrind
# ARM: whether to install the ARM cross-compiler and emulator
# GITHUB_WORKFLOW: when run from github
# API: whether to install swig
# CMANY: whether to install cmany
#-------------------------------------------------------------------------------
function c4_install_test_requirements()
{
os=$1
case "$os" in
ubuntu*)
c4_install_test_requirements_ubuntu
return 0
;;
macos*)
c4_install_test_requirements_macos
return 0
;;
win*)
c4_install_test_requirements_windows
return 0
;;
*)
return 0
;;
esac
}
function c4_install_test_requirements_windows()
{
if [ "$CMANY" == "ON" ] ; then
pip install cmany
fi
if [ "$API" == "ON" ] ; then
choco install swig
which swig
fi
# ensure chocolatey does not override cmake's cpack
which cpack
choco_cpack="/c/ProgramData/Chocolatey/bin/cpack.exe"
if [ -f $choco_cpack ] ; then
newname=$(echo $choco_cpack | sed 's:cpack:choco-cpack:')
mv -vf $choco_cpack $newname
fi
which cpack
}
function c4_install_test_requirements_macos()
{
if [ "$CMANY" == "ON" ] ; then
sudo pip3 install cmany
fi
}
function c4_install_test_requirements_ubuntu()
{
APT_PKG="" # all
PIP_PKG=""
c4_gather_test_requirements_ubuntu
echo "apt packages: $APT_PKG"
echo "pip packages: $PIP_PKG"
c4_install_test_requirements_ubuntu_impl
echo 'INSTALL COMPLETE!'
}
function c4_install_all_possible_requirements_ubuntu()
{
export CXX_=all
export BT=Coverage
APT_PKG="" # all
PIP_PKG=""
sudo dpkg --add-architecture i386
c4_gather_test_requirements_ubuntu
_c4_add_arm_compilers
echo "apt packages: $APT_PKG"
echo "pip packages: $PIP_PKG"
c4_install_test_requirements_ubuntu_impl
echo 'INSTALL COMPLETE!'
}
function c4_gather_test_requirements_ubuntu()
{
if [ "$GITHUB_WORKFLOW" != "" ] ; then
sudo dpkg --add-architecture i386
else
_add_apt build-essential
_add_apt cmake
fi
_add_apt linux-libc-dev:i386
_add_apt libc6:i386
_add_apt libc6-dev:i386
_add_apt libc6-dbg:i386
_c4_addlibcxx
_c4_gather_compilers "$CXX_"
_add_apt python3-setuptools
_add_apt python3-pip
#_add_apt iwyu
#_add_apt cppcheck
#_add_pip cpplint
# oclint?
if [ "$VG" == "ON" ] ; then
_add_apt valgrind
fi
if [ "$BT" == "Coverage" ]; then
_add_apt lcov
_add_apt libffi-dev
_add_apt libssl-dev
_add_pip requests[security]
_add_pip pyopenssl
_add_pip ndg-httpsclient
_add_pip pyasn1
_add_pip cpp-coveralls
fi
if [ "$CMANY" != "" ] ; then
_add_pip cmany
fi
case "$CXX_" in
arm*)
_c4_add_arm_compilers
;;
esac
}
function c4_install_test_requirements_ubuntu_impl()
{
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key 2>/dev/null | sudo apt-key add -
wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | sudo apt-key add -
sudo -E apt-add-repository --yes 'deb https://apt.kitware.com/ubuntu/ bionic main'
sudo -E add-apt-repository --yes ppa:ubuntu-toolchain-r/test
if [ "$APT_PKG" != "" ] ; then
#sudo -E apt-get clean
sudo -E apt-get update
sudo -E apt-get install -y --force-yes $APT_PKG
fi
if [ "$PIP_PKG" != "" ]; then
sudo pip3 install $PIP_PKG
fi
}
#-------------------------------------------------------------------------------
function _c4_add_arm_compilers()
{
# this is going to be deprecated:
# https://askubuntu.com/questions/1243252/how-to-install-arm-none-eabi-gdb-on-ubuntu-20-04-lts-focal-fossa
sudo -E add-apt-repository --yes ppa:team-gcc-arm-embedded/ppa
_add_apt gcc-arm-embedded
_add_apt g++-arm-linux-gnueabihf
_add_apt g++-multilib-arm-linux-gnueabihf
_add_apt qemu
}
function _c4_gather_compilers()
{
cxx=$1
case $cxx in
g++-11 ) _c4_addgcc 11 ;;
g++-10 ) _c4_addgcc 10 ;;
g++-9 ) _c4_addgcc 9 ;;
g++-8 ) _c4_addgcc 8 ;;
g++-7 ) _c4_addgcc 7 ;;
g++-6 ) _c4_addgcc 6 ;;
g++-5 ) _c4_addgcc 5 ;;
#g++-4.9 ) _c4_addgcc 4.9 ;; # https://askubuntu.com/questions/1036108/install-gcc-4-9-at-ubuntu-18-04
g++-4.8 ) _c4_addgcc 4.8 ;;
clang++-12 ) _c4_addclang 12 ;;
clang++-11 ) _c4_addclang 11 ;;
clang++-10 ) _c4_addclang 10 ;;
clang++-9 ) _c4_addclang 9 ;;
clang++-8 ) _c4_addclang 8 ;;
clang++-7 ) _c4_addclang 7 ;;
clang++-6.0) _c4_addclang 6.0 ;;
clang++-5.0) _c4_addclang 5.0 ;;
clang++-4.0) _c4_addclang 4.0 ;;
clang++-3.9) _c4_addclang 3.9 ;;
all)
all="g++-11 g++-10 g++-9 g++-8 g++-7 g++-6 g++-5 clang++-12 clang++-11 clang++-10 clang++-9 clang++-8 clang++-7 clang++-6.0 clang++-5.0 clang++-4.0 clang++-3.9"
echo "installing all compilers: $all"
for cxx in $all ; do
_c4_gather_compilers $cxx
done
;;
"")
# use default compiler
;;
arm*)
;;
*)
echo "unknown compiler: $cxx"
exit 1
;;
esac
}
# add a gcc compiler
function _c4_addgcc()
{
gccversion=$1
case $gccversion in
5 )
_add_apt gcc-5 "deb http://dk.archive.ubuntu.com/ubuntu/ xenial main"
_add_apt gcc-5 "deb http://dk.archive.ubuntu.com/ubuntu/ xenial universe"
;;
*)
;;
esac
_add_apt g++-$gccversion
_add_apt g++-$gccversion-multilib
_add_apt libstdc++-$gccversion-dev
_add_apt lib32stdc++-$gccversion-dev
}
# add a clang compiler
function _c4_addclang()
{
clversion=$1
case $clversion in
# in 18.04, clang9 and later require PPAs
9 | 10 | 11 | 12 )
_add_apt clang-$clversion "deb http://apt.llvm.org/bionic/ llvm-toolchain-bionic-$clversion main"
# libstdc++ is required
_c4_addgcc 11
_c4_addgcc 10
_c4_addgcc 9
;;
*)
_add_apt clang-$clversion
;;
esac
_add_apt g++-multilib # this is required for 32 bit https://askubuntu.com/questions/1057341/unable-to-find-stl-headers-in-ubuntu-18-04
_add_apt clang-tidy-$clversion
}
# add libc++
function _c4_addlibcxx()
{
_add_apt libc++1
_add_apt libc++abi-dev
_add_apt libc++-dev
_add_apt libc++1:i386
_add_apt libc++abi-dev:i386
_add_apt libc++-dev:i386
}
#-------------------------------------------------------------------------------
# add a pip package to the list
function _add_pip()
{
pkgs=$*
PIP_PKG="$PIP_PKG $pkgs"
echo "adding to pip packages: $pkgs"
}
# add a debian package to the list
function _add_apt()
{
pkgs=$1
sourceslist=$2
APT_PKG="$APT_PKG $pkgs"
echo "adding to apt packages: $pkgs"
_add_src "$sourceslist" "# for packages: $pkgs"
}
# add an apt source
function _add_src()
{
sourceslist=$1
comment=$2
if [ ! -z "$sourceslist" ] ; then
echo "adding apt source: $sourceslist"
sudo bash -c "cat >> /etc/apt/sources.list <<EOF
$comment
$sourceslist
EOF"
#cat /etc/apt/sources.list
fi
}

@@ -0,0 +1,410 @@
#!/usr/bin/env bash
set -e
set -x
PROJ_DIR=$(pwd)
function c4_show_info()
{
set +x
env | sort
echo "PROJ_DIR=$PROJ_DIR"
echo "PROJ_PFX_TARGET=$PROJ_PFX_TARGET"
echo "PROJ_PFX_CMAKE=$PROJ_PFX_CMAKE"
echo "CMAKE_FLAGS=$CMAKE_FLAGS"
echo "NUM_JOBS_BUILD=$NUM_JOBS_BUILD"
echo "GITHUB_WORKSPACE=$GITHUB_WORKSPACE"
pwd
ls -lFhp
echo "BITLINKS=$BITLINKS"
for bl in shared64 static64 shared32 static32 ; do
if _c4skipbitlink $bl ; then
echo "skip $bl"
else
echo "exec $bl"
fi
done
echo "CXX_=$CXX_"
echo "BT=$BT"
echo "LINT=$LINT"
echo "SAN=$SAN"
echo "SAN_ONLY=$SAN"
echo "VG=$VG"
echo "BM=$BM"
echo "STD=$STD"
echo "ARM=$ARM"
echo "LIBCXX=$LIBCXX"
echo "VERBOSE_MAKEFILES=$VERBOSE_MAKEFILES"
which cmake
cmake --version
case "$CXX_" in
xcode)
# https://gist.github.com/nlutsenko/ee245fbd239087d22137
echo "number of cores=$(sysctl -n hw.ncpu)"
#defaults read com.apple.dt.xcodebuild | grep -i Number | grep -i Build
#defaults read com.apple.dt.Xcode | grep -i Number | grep -i Tasks
;;
gcc*|g++*|*clang*)
echo "number of cores=$(nproc)"
$CXX_ --version
;;
esac
set -x
git branch
git rev-parse HEAD
git tag || echo
git log -1 --format='%H'
}
function _c4bits()
{
case "$1" in
shared64|static64|arm64) echo 64 ;;
shared32|static32|arm32|arm) echo 32 ;;
*) exit 1 ;;
esac
}
function _c4linktype()
{
case "$1" in
shared64|shared32) echo shared ;;
static64|static32) echo static ;;
*) exit 1 ;;
esac
}
function _c4skipbitlink()
{
bitlink___=$1
if [ -z "$BITLINKS" ] ; then
return 1 # return nonzero as failure, meaning DO NOT SKIP
fi
for bl___ in $BITLINKS ; do
if [ "${bl___}" == "${bitlink___}" ] ; then
return 1 # return nonzero as failure, meaning DO NOT SKIP
fi
done
return 0 # return zero as success, meaning DO SKIP
}
function c4_build_test()
{
c4_build_target $* test-build
}
function c4_run_test()
{
c4_run_target $* test
}
function c4_build_target() # runs in parallel
{
if _c4skipbitlink "$1" ; then return 0 ; fi
id=$1
target=$2
if [ ! -z "$target" ] ; then
target="--target $target"
fi
build_dir=`pwd`/build/$id
export CTEST_OUTPUT_ON_FAILURE=1
# watch out: the `--parallel` flag to `cmake --build` is broken:
# https://discourse.cmake.org/t/parallel-does-not-really-enable-parallel-compiles-with-msbuild/964/10
# https://gitlab.kitware.com/cmake/cmake/-/issues/20564
cmake --build $build_dir --config $BT $target -- $(_c4_generator_build_flags) $(_c4_parallel_build_flags)
}
function c4_run_target() # does not run in parallel
{
if _c4skipbitlink "$1" ; then return 0 ; fi
id=$1
target=$2
build_dir=`pwd`/build/$id
export CTEST_OUTPUT_ON_FAILURE=1
cmake --build $build_dir --config $BT --target $target -- $(_c4_generator_build_flags)
}
function c4_package()
{
if _c4skipbitlink "$1" ; then return 0 ; fi
id=$1
generator=$2
build_dir=`pwd`/build/$id
if [ -z "$generator" ] ; then
c4_run_target $id package
else
( cd $build_dir ; cpack -G $generator )
fi
}
function c4_submit_coverage()
{
if [ "$BT" != "Coverage" ] ; then
echo "build type is \"$BT\": no coverage to submit"
return 0
fi
if _c4skipbitlink "$1" ; then return 0 ; fi
id=$1
coverage_service=$2
build_dir=`pwd`/build/$id
echo "Submitting coverage data: $build_dir --> $coverage_service"
cmake --build $build_dir --config $BT --target ${PROJ_PFX_TARGET}coverage-submit-$coverage_service
}
# WIP
function c4_run_static_analysis()
{
if _c4skipbitlink "$1" ; then return 0 ; fi
id=$1
linktype=$(_c4linktype $id)
build_dir=`pwd`/build/$id
# https://blog.kitware.com/static-checks-with-cmake-cdash-iwyu-clang-tidy-lwyu-cpplint-and-cppcheck/
pushd $PROJ_DIR
}
function c4_cfg_test()
{
if _c4skipbitlink "$1" ; then return 0 ; fi
id=$1
#
build_dir=`pwd`/build/$id
install_dir=`pwd`/install/$id
mkdir -p $build_dir
mkdir -p $install_dir
#
if [ "$TOOLCHAIN" != "" ] ; then
toolchain_file=`pwd`/$TOOLCHAIN
if [ ! -f "$toolchain_file" ] ; then
echo "ERROR: toolchain not found: $toolchain_file"
exit 1
fi
_addcmkflags -DCMAKE_TOOLCHAIN_FILE=$toolchain_file
else
bits=$(_c4bits $id)
linktype=$(_c4linktype $id)
case "$linktype" in
static) _addcmkflags -DBUILD_SHARED_LIBS=OFF ;;
shared) _addcmkflags -DBUILD_SHARED_LIBS=ON ;;
*)
echo "ERROR: unknown linktype: $linktype"
exit 1
;;
esac
fi
if [ "$STD" != "" ] ; then
_addcmkflags -DC4_CXX_STANDARD=$STD
_addprojflags CXX_STANDARD=$STD
fi
if [ "$LIBCXX" != "" ] ; then
_addprojflags USE_LIBCXX=$LIBCXX
fi
#
if [ "$DEV" != "OFF" ] ; then
_addprojflags DEV=ON
fi
case "$LINT" in
all ) _addprojflags LINT=ON LINT_TESTS=ON LINT_CLANG_TIDY=ON LINT_PVS_STUDIO=ON ;;
clang-tidy) _addprojflags LINT=ON LINT_TESTS=ON LINT_CLANG_TIDY=ON LINT_PVS_STUDIO=OFF ;;
pvs-studio) _addprojflags LINT=ON LINT_TESTS=ON LINT_CLANG_TIDY=OFF LINT_PVS_STUDIO=ON ;;
* ) _addprojflags LINT=OFF ;;
esac
case "$SAN" in
ALL) _addprojflags SANITIZE=ON ;;
A ) _addprojflags SANITIZE=ON ASAN=ON TSAN=OFF MSAN=OFF UBSAN=OFF ;;
T ) _addprojflags SANITIZE=ON ASAN=OFF TSAN=ON MSAN=OFF UBSAN=OFF ;;
M ) _addprojflags SANITIZE=ON ASAN=OFF TSAN=OFF MSAN=ON UBSAN=OFF ;;
UB ) _addprojflags SANITIZE=ON ASAN=OFF TSAN=OFF MSAN=OFF UBSAN=ON ;;
* ) _addprojflags SANITIZE=OFF ;;
esac
case "$SAN_ONLY" in
ON) _addprojflags SANITIZE_ONLY=ON ;;
* ) _addprojflags SANITIZE_ONLY=OFF ;;
esac
case "$VG" in
ON) _addprojflags VALGRIND=ON VALGRIND_SGCHECK=OFF ;; # FIXME SGCHECK should be ON
* ) _addprojflags VALGRIND=OFF VALGRIND_SGCHECK=OFF ;;
esac
case "$BM" in
ON) _addprojflags BUILD_BENCHMARKS=ON ;;
* ) _addprojflags BUILD_BENCHMARKS=OFF ;;
esac
if [ "$BT" == "Coverage" ] ; then
# the coverage repo tokens can be set in the travis environment:
# export CODECOV_TOKEN=.......
# export COVERALLS_REPO_TOKEN=.......
_addprojflags COVERAGE_CODECOV=ON COVERAGE_CODECOV_SILENT=ON
_addprojflags COVERAGE_COVERALLS=ON COVERAGE_COVERALLS_SILENT=ON
fi
if [ ! -z "$VERBOSE_MAKEFILES" ] ; then
_addcmkflags -DCMAKE_VERBOSE_MAKEFILES=$VERBOSE_MAKEFILES
fi
_addcmkflags -DCMAKE_EXPORT_COMPILE_COMMANDS=ON
if [ ! -z "$CMAKE_FLAGS" ] ; then
_addcmkflags $CMAKE_FLAGS
fi
echo "building with additional cmake flags: $CMFLAGS"
export C4_EXTERN_DIR=`pwd`/build/extern
mkdir -p $C4_EXTERN_DIR
cmake --version
pwd
#
# bash quote handling is a fiasco, and I could not find a way of storing
# quoted strings in variables and then expanding the variables with correct
# quoting, so we have to resort to this precious jewel of chicanery:
case "$CXX_" in
vs2019)
g='Visual Studio 16 2019'
case "$bits" in
64) a=x64 ;;
32) a=Win32 ;;
esac
cmake -S $PROJ_DIR -B $build_dir -DCMAKE_INSTALL_PREFIX="$install_dir" \
-DCMAKE_BUILD_TYPE=$BT -G "$g" -A $a $CMFLAGS
;;
vs2017)
case "$bits" in
64) g="Visual Studio 15 2017 Win64" ;;
32) g="Visual Studio 15 2017" ;;
esac
cmake -S $PROJ_DIR -B $build_dir -DCMAKE_INSTALL_PREFIX="$install_dir" \
-DCMAKE_BUILD_TYPE=$BT -G "$g" $CMFLAGS
;;
xcode)
g=Xcode
case "$bits" in
64) a="x86_64" ;;
32) a="i386"
exit 1 # i386 is deprecated in xcode
;;
esac
cmake -S $PROJ_DIR -B $build_dir -DCMAKE_INSTALL_PREFIX="$install_dir" \
-DCMAKE_BUILD_TYPE=$BT -G "$g" -DCMAKE_OSX_ARCHITECTURES=$a $CMFLAGS
;;
arm*|"") # make sure arm* comes before *g++ or *gcc*
cmake -S $PROJ_DIR -B $build_dir -DCMAKE_INSTALL_PREFIX="$install_dir" \
-DCMAKE_BUILD_TYPE=$BT $CMFLAGS
;;
*g++*|*gcc*|*clang*)
export CC_=$(echo "$CXX_" | sed 's:clang++:clang:g' | sed 's:g++:gcc:g')
_c4_choose_clang_tidy $CXX_
cmake -S $PROJ_DIR -B $build_dir -DCMAKE_INSTALL_PREFIX="$install_dir" \
-DCMAKE_BUILD_TYPE=$BT $CMFLAGS \
-DCMAKE_C_COMPILER=$CC_ -DCMAKE_CXX_COMPILER=$CXX_ \
-DCMAKE_C_FLAGS="-std=c99 -m$bits" -DCMAKE_CXX_FLAGS="-m$bits"
cmake --build $build_dir --target help | sed 1d | sort
;;
em++)
emcmake cmake -S $PROJ_DIR -B $build_dir -DCMAKE_INSTALL_PREFIX="$install_dir" \
-DCMAKE_BUILD_TYPE=$BT $CMFLAGS -DCMAKE_CXX_FLAGS="-s DISABLE_EXCEPTION_CATCHING=0"
;;
*)
echo "unknown compiler"
exit 1
;;
esac
}
function _c4_choose_clang_tidy()
{
cxx=$1
# only for clang compilers.
case $cxx in
clang*)
# try with version first
clang_tidy_ver=$(echo $cxx | sed "s:++:-tidy:")
clang_tidy=$(echo $cxx | sed "s:++.*:-tidy:")
for n in $clang_tidy_ver $clang_tidy ; do
exe=$(which $n)
echo "searching for $n: $exe"
if [ -z "$exe" ] ; then
echo "could not find $clang_tidy"
else
_addcmkflags "-DCLANG_TIDY=$exe"
return 0
fi
done
echo "error: could not find clang-tidy for $cxx"
exit 1
;;
esac
}
# add cmake flags without project prefix
function _addcmkflags()
{
for f in $* ; do
CMFLAGS="$CMFLAGS ${f}"
done
}
# add cmake flags with project prefix
function _addprojflags()
{
for f in $* ; do
CMFLAGS="$CMFLAGS -D${PROJ_PFX_CMAKE}${f}"
done
}
function _c4_parallel_build_flags()
{
case "$CXX_" in
vs2019|vs2017|vs2015)
# https://docs.microsoft.com/en-us/visualstudio/msbuild/msbuild-command-line-reference?view=vs-2019
# https://stackoverflow.com/questions/2619198/how-to-get-number-of-cores-in-win32
if [ -z "$NUM_JOBS_BUILD" ] ; then
echo "/maxcpucount:$NUMBER_OF_PROCESSORS"
else
echo "/maxcpucount:$NUM_JOBS_BUILD"
fi
;;
xcode)
# https://stackoverflow.com/questions/5417835/how-to-modify-the-number-of-parallel-compilation-with-xcode
# https://gist.github.com/nlutsenko/ee245fbd239087d22137
if [ -z "$NUM_JOBS_BUILD" ] ; then
echo "-IDEBuildOperationMaxNumberOfConcurrentCompileTasks=$(sysctl -n hw.ncpu)"
else
echo "-IDEBuildOperationMaxNumberOfConcurrentCompileTasks=$NUM_JOBS_BUILD"
fi
;;
*g++*|*gcc*|*clang*|em++)
if [ -z "$NUM_JOBS_BUILD" ] ; then
echo "-j $(nproc)"
else
echo "-j $NUM_JOBS_BUILD"
fi
;;
"") # allow empty compiler
;;
*)
echo "unknown compiler"
exit 1
;;
esac
}
function _c4_generator_build_flags()
{
case "$CXX_" in
vs2019|vs2017|vs2015)
;;
xcode)
# WTF???
# https://github.com/biojppm/rapidyaml/pull/97/checks?check_run_id=1504677928#step:7:964
# https://stackoverflow.com/questions/51153525/xcode-10-unable-to-attach-db-error
echo "-UseModernBuildSystem=NO"
;;
*g++*|*gcc*|*clang*|em++)
;;
"") # allow empty compiler
;;
*)
echo "unknown compiler"
exit 1
;;
esac
}

@@ -0,0 +1,80 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :
# 1) download and install vagrant: https://www.vagrantup.com/downloads.html
# (do not install Ubuntu's packaged 14.04/16.04 version; see https://stackoverflow.com/questions/22717428/vagrant-error-failed-to-mount-folders-in-linux-guest)
# 2) vagrant plugin install vagrant-vbguest
# 3) vagrant up --provider virtualbox
# 4) vagrant ssh
# All Vagrant configuration is done below. The "2" in Vagrant.configure
# configures the configuration version (we support older styles for
# backwards compatibility). Please don't change it unless you know what
# you're doing.
Vagrant.configure(2) do |config|
# The most common configuration options are documented and commented below.
# For a complete reference, please see the online documentation at
# https://docs.vagrantup.com.
# Every Vagrant development environment requires a box. You can search for
# boxes at https://atlas.hashicorp.com/search.
config.vm.box = "generic/ubuntu1804"
# Disable automatic box update checking. If you disable this, then
# boxes will only be checked for updates when the user runs
# `vagrant box outdated`. This is not recommended.
# config.vm.box_check_update = false
# Create a forwarded port mapping which allows access to a specific port
# within the machine from a port on the host machine. In the example below,
# accessing "localhost:8080" will access port 80 on the guest machine.
# config.vm.network "forwarded_port", guest: 80, host: 8080
#config.ssh.username = 'travis'
#config.ssh.password = 'travis'
# Create a private network, which allows host-only access to the machine
# using a specific IP.
# config.vm.network "private_network", ip: "192.168.33.10"
# Create a public network, which generally matches to a bridged network.
# Bridged networks make the machine appear as another physical device on
# your network.
# config.vm.network "public_network"
# Share an additional folder to the guest VM. The first argument is
# the path on the host to the actual folder. The second argument is
# the path on the guest to mount the folder. And the optional third
# argument is a set of non-required options.
config.vm.synced_folder "../../../..", "/vagrant"
#config.vm.synced_folder '.', '/vagrant', disabled: true
# Provider-specific configuration so you can fine-tune various
# backing providers for Vagrant. These expose provider-specific options.
# Example for VirtualBox:
#
# config.vm.provider "virtualbox" do |vb|
# # Display the VirtualBox GUI when booting the machine
# vb.gui = true
#
# # Customize the amount of memory on the VM:
# vb.memory = "1024"
# end
#
# View the documentation for the provider you are using for more
# information on available options.
# Define a Vagrant Push strategy for pushing to Atlas. Other push strategies
# such as FTP and Heroku are also available. See the documentation at
# https://docs.vagrantup.com/v2/push/atlas.html for more information.
# config.push.define "atlas" do |push|
# push.app = "YOUR_ATLAS_USERNAME/YOUR_APPLICATION_NAME"
# end
# Enable provisioning with a shell script. Additional provisioners such as
# Puppet, Chef, Ansible, Salt, and Docker are also available. Please see the
# documentation for more information about their specific syntax and use.
#config.vm.provision "shell", path: "travis-install.sh"
end

@@ -0,0 +1,71 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :
# All Vagrant configuration is done below. The "2" in Vagrant.configure
# configures the configuration version (we support older styles for
# backwards compatibility). Please don't change it unless you know what
# you're doing.
Vagrant.configure("2") do |config|
# The most common configuration options are documented and commented below.
# For a complete reference, please see the online documentation at
# https://docs.vagrantup.com.
# Every Vagrant development environment requires a box. You can search for
# boxes at https://vagrantcloud.com/search.
config.vm.box = "ramsey/macos-catalina"
config.vm.box_version = "1.0.0"
# Disable automatic box update checking. If you disable this, then
# boxes will only be checked for updates when the user runs
# `vagrant box outdated`. This is not recommended.
# config.vm.box_check_update = false
# Create a forwarded port mapping which allows access to a specific port
# within the machine from a port on the host machine. In the example below,
# accessing "localhost:8080" will access port 80 on the guest machine.
# NOTE: This will enable public access to the opened port
# config.vm.network "forwarded_port", guest: 80, host: 8080
# Create a forwarded port mapping which allows access to a specific port
# within the machine from a port on the host machine and only allow access
# via 127.0.0.1 to disable public access
# config.vm.network "forwarded_port", guest: 80, host: 8080, host_ip: "127.0.0.1"
# Create a private network, which allows host-only access to the machine
# using a specific IP.
# config.vm.network "private_network", ip: "192.168.33.10"
# Create a public network, which generally matches to a bridged network.
# Bridged networks make the machine appear as another physical device on
# your network.
# config.vm.network "public_network"
# Share an additional folder to the guest VM. The first argument is
# the path on the host to the actual folder. The second argument is
# the path on the guest to mount the folder. And the optional third
# argument is a set of non-required options.
# config.vm.synced_folder "../data", "/vagrant_data"
# Provider-specific configuration so you can fine-tune various
# backing providers for Vagrant. These expose provider-specific options.
# Example for VirtualBox:
#
# config.vm.provider "virtualbox" do |vb|
# # Display the VirtualBox GUI when booting the machine
# vb.gui = true
#
# # Customize the amount of memory on the VM:
# vb.memory = "1024"
# end
#
# View the documentation for the provider you are using for more
# information on available options.
# Enable provisioning with a shell script. Additional provisioners such as
# Ansible, Chef, Docker, Puppet and Salt are also available. Please see the
# documentation for more information about their specific syntax and use.
# config.vm.provision "shell", inline: <<-SHELL
# apt-get update
# apt-get install -y apache2
# SHELL
end

@@ -0,0 +1,71 @@
#!/usr/bin/env bash
set -x
# https://askubuntu.com/questions/735201/installing-clang-3-8-on-ubuntu-14-04-3
wget -O - http://llvm.org/apt/llvm-snapshot.gpg.key | sudo apt-key add -
done=$(grep C4STL /etc/apt/sources.list)
if [ -z "$done" ] ; then
cat >> /etc/apt/sources.list <<EOF
# C4STL
# http://apt.llvm.org/
#deb http://llvm.org/apt/trusty/ llvm-toolchain-trusty-3.7 main
#deb http://llvm.org/apt/trusty/ llvm-toolchain-trusty-3.8 main
deb http://llvm.org/apt/trusty/ llvm-toolchain-trusty-3.9 main
deb http://llvm.org/apt/trusty/ llvm-toolchain-trusty-4.0 main
#deb http://llvm.org/apt/trusty/ llvm-toolchain-trusty-5.0 main
EOF
fi
sudo -E apt-get install -y software-properties-common python-software-properties
sudo -E add-apt-repository -y ppa:ubuntu-toolchain-r/test
sudo -E add-apt-repository -y ppa:george-edison55/cmake-3.x
sudo -E apt-get -yq update
sudo -E apt-get install -yq --force-yes \
build-essential \
cmake \
g++-5 \
g++-5-multilib \
g++-6 \
g++-6-multilib \
g++-7 \
g++-7-multilib \
g++-8 \
g++-8-multilib \
g++-9 \
g++-9-multilib \
g++-10 \
g++-10-multilib \
g++-11 \
g++-11-multilib \
clang-3.7 \
clang-3.8 \
clang-3.9 \
clang-4.0 \
swig3.0 \
libssl-dev \
zlib1g-dev \
libbz2-dev \
libreadline-dev \
libsqlite3-dev \
wget \
curl \
llvm \
libncurses5-dev \
libncursesw5-dev \
xz-utils \
tk-dev \
libffi-dev \
liblzma-dev \
python-openssl \
git \
python3 \
python3-pip \
python3-venv
sudo -E pip install cmany
exit 0

@@ -0,0 +1,116 @@
name: rarearchs
defaults:
#if: "!contains(github.event.head_commit.message, 'skip ci')" # SKIP
run:
# Use a bash shell so we can use the same syntax for environment variable
# access regardless of the host operating system
shell: bash -e -x {0}
on:
# https://github.community/t/how-to-trigger-an-action-on-push-or-pull-request-but-not-both/16662
workflow_dispatch:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
rarearchs:
name: ${{matrix.arch}}/c++${{matrix.std}}/${{matrix.bt}}
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
include:
- {std: 11, bt: Debug , arch: aarch64, distro: ubuntu20.04}
- {std: 11, bt: Release, arch: aarch64, distro: ubuntu20.04}
- {std: 14, bt: Debug , arch: aarch64, distro: ubuntu20.04}
- {std: 14, bt: Release, arch: aarch64, distro: ubuntu20.04}
- {std: 17, bt: Debug , arch: aarch64, distro: ubuntu20.04}
- {std: 17, bt: Release, arch: aarch64, distro: ubuntu20.04}
#
- {std: 11, bt: Debug , arch: ppc64le, distro: ubuntu20.04}
- {std: 11, bt: Release, arch: ppc64le, distro: ubuntu20.04}
- {std: 14, bt: Debug , arch: ppc64le, distro: ubuntu20.04}
- {std: 14, bt: Release, arch: ppc64le, distro: ubuntu20.04}
- {std: 17, bt: Debug , arch: ppc64le, distro: ubuntu20.04}
- {std: 17, bt: Release, arch: ppc64le, distro: ubuntu20.04}
#
- {std: 11, bt: Debug , arch: s390x , distro: ubuntu20.04}
- {std: 11, bt: Release, arch: s390x , distro: ubuntu20.04}
- {std: 14, bt: Debug , arch: s390x , distro: ubuntu20.04}
- {std: 14, bt: Release, arch: s390x , distro: ubuntu20.04}
- {std: 17, bt: Debug , arch: s390x , distro: ubuntu20.04}
- {std: 17, bt: Release, arch: s390x , distro: ubuntu20.04}
#
#- {std: 11, bt: Debug , arch: armv6 , distro: bullseye}
#- {std: 11, bt: Release, arch: armv6 , distro: bullseye}
#- {std: 14, bt: Debug , arch: armv6 , distro: bullseye}
#- {std: 14, bt: Release, arch: armv6 , distro: bullseye}
#- {std: 17, bt: Debug , arch: armv6 , distro: bullseye}
#- {std: 17, bt: Release, arch: armv6 , distro: bullseye}
#
#- {std: 11, bt: Debug , arch: armv7 , distro: ubuntu20.04}
#- {std: 11, bt: Release, arch: armv7 , distro: ubuntu20.04}
#- {std: 14, bt: Debug , arch: armv7 , distro: ubuntu20.04}
#- {std: 14, bt: Release, arch: armv7 , distro: ubuntu20.04}
#- {std: 17, bt: Debug , arch: armv7 , distro: ubuntu20.04}
#- {std: 17, bt: Release, arch: armv7 , distro: ubuntu20.04}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- name: test
uses: uraimo/run-on-arch-action@v2.0.5
with:
arch: ${{matrix.arch}}
distro: ${{matrix.distro}}
install: |
set -x
apt-get update -y
apt-get install -y \
git \
build-essential
# arm platforms need an up-to-date cmake:
# https://gitlab.kitware.com/cmake/cmake/-/issues/20568
if [ "${{matrix.arch}}" == "armv6" ] || [ "${{matrix.arch}}" == "armv7" ] ; then
apt-get install -y \
gpg \
wget \
apt-transport-https
wget --no-check-certificate -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | tee /usr/share/keyrings/kitware-archive-keyring.gpg >/dev/null
echo 'deb [signed-by=/usr/share/keyrings/kitware-archive-keyring.gpg] https://apt.kitware.com/ubuntu/ focal main' | tee /etc/apt/sources.list.d/kitware.list >/dev/null
apt-get update -y
rm /usr/share/keyrings/kitware-archive-keyring.gpg
apt-get install kitware-archive-keyring
apt-get update -y
fi
apt-get install -y cmake cmake-data
cmake --version
run: |
set -x
uname -a
pwd
ls -lFhp .
#
bdir=build_${{matrix.arch}}_${{matrix.bt}}_${{matrix.std}}
idir=install_${{matrix.arch}}_${{matrix.bt}}_${{matrix.std}}
mkdir -p $bdir
#
cmake -S . -B $bdir \
-DCMAKE_INSTALL_PREFIX=$idir \
-DCMAKE_BUILD_TYPE=${{matrix.bt}} \
-DC4_CXX_STANDARD=${{matrix.std}} \
-DCXX_STANDARD=${{matrix.std}} \
-DC4CORE_DEV=ON \
-DC4CORE_BUILD_BENCHMARKS=OFF \
-DC4CORE_SANITIZE=OFF \
-DC4CORE_LINT=OFF \
-DC4CORE_VALGRIND=OFF
#
cmake --build $bdir -j --target c4core-test-build
#
cmake --build $bdir --target c4core-test-run

@@ -0,0 +1,87 @@
name: benchmarks
defaults:
run:
# Use a bash shell so we can use the same syntax for environment variable
# access regardless of the host operating system
shell: bash -e -x {0}
on:
# https://github.community/t/how-to-trigger-an-action-on-push-or-pull-request-but-not-both/16662
workflow_dispatch:
push:
branches:
- master
pull_request:
branches:
- master
env:
PROJ_PFX_TARGET: c4core-
PROJ_PFX_CMAKE: C4CORE_
CMAKE_FLAGS:
NUM_JOBS_BUILD: # 4
jobs:
benchmarks:
name: bm/c++${{matrix.std}}/${{matrix.cxx}}/${{matrix.bt}}
if: |
(!contains(github.event.head_commit.message, 'skip all')) ||
(!contains(github.event.head_commit.message, 'skip benchmarks')) ||
contains(github.event.head_commit.message, 'only benchmarks')
continue-on-error: true
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 11, cxx: g++-10, bt: Debug , os: ubuntu-18.04 , bitlinks: static64 static32}
- {std: 11, cxx: g++-10, bt: Release, os: ubuntu-18.04 , bitlinks: static64 static32}
- {std: 17, cxx: g++-10, bt: Debug , os: ubuntu-18.04 , bitlinks: static64 static32}
- {std: 17, cxx: g++-10, bt: Release, os: ubuntu-18.04 , bitlinks: static64 static32}
- {std: 20, cxx: g++-10, bt: Debug , os: ubuntu-18.04 , bitlinks: static64 static32}
- {std: 20, cxx: g++-10, bt: Release, os: ubuntu-18.04 , bitlinks: static64 static32}
- {std: 11, cxx: vs2019, bt: Debug , os: windows-latest, bitlinks: static64 static32}
- {std: 11, cxx: vs2019, bt: Release, os: windows-latest, bitlinks: static64 static32}
- {std: 17, cxx: vs2019, bt: Debug , os: windows-latest, bitlinks: static64 static32}
- {std: 17, cxx: vs2019, bt: Release, os: windows-latest, bitlinks: static64 static32}
- {std: 20, cxx: vs2019, bt: Debug , os: windows-latest, bitlinks: static64 static32}
- {std: 20, cxx: vs2019, bt: Release, os: windows-latest, bitlinks: static64 static32}
env: {BM: ON, STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
# use fetch-depth to ensure all tags are fetched
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive, fetch-depth: 0}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_target shared64 c4core-bm-build}
- {name: shared64-run, run: export NUM_JOBS_BUILD=1 && source .github/setenv.sh && c4_run_target shared64 c4core-bm-run}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_target static64 c4core-bm-build}
- {name: static64-run, run: export NUM_JOBS_BUILD=1 && source .github/setenv.sh && c4_run_target static64 c4core-bm-run}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_target static32 c4core-bm-build}
- {name: static32-run, run: export NUM_JOBS_BUILD=1 && source .github/setenv.sh && c4_run_target static32 c4core-bm-run}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_target shared32 c4core-bm-build}
- {name: shared32-run, run: export NUM_JOBS_BUILD=1 && source .github/setenv.sh && c4_run_target shared32 c4core-bm-run}
- name: gather benchmark results
run: |
set -x
desc=$(git describe || git rev-parse --short HEAD)
for bl in ${{matrix.bitlinks}} ; do
dst=$(echo benchmark_results/$desc/${{matrix.cxx}}-${{matrix.bt}}-c++${{matrix.std}}-$bl | sed 's:++-:xx:g' | sed 's:+:x:g')
mkdir -p $dst
find build -name bm-results
mv -vf build/$bl/bm/bm-results/* $dst/.
done
- name: upload benchmark result artifacts
uses: actions/upload-artifact@v2
with:
name: benchmark_results
path: benchmark_results/

@@ -0,0 +1,692 @@
name: ci
defaults:
#if: "!contains(github.event.head_commit.message, 'skip ci')" # SKIP
run:
# Use a bash shell so we can use the same syntax for environment variable
# access regardless of the host operating system
shell: bash -e -x {0}
on:
- push
- pull_request
- workflow_dispatch
env:
PROJ_PFX_TARGET: c4core-
PROJ_PFX_CMAKE: C4CORE_
CMAKE_FLAGS:
NUM_JOBS_BUILD: # 4
# ubuntu-20.04:
# # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-README.md
# gcc: 7.5.0, 8.4.0, 9.3.0, 10.2.0
# clang: 8.0.1, 9.0.1, 10.0.0
# ubuntu-18.04:
# # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu1804-README.md
# gcc: 7.5.0, 8.4.0, 9.3.0, 10.1.0
# clang: 6.0.0, 8.0.0, 9.0.0
# ubuntu-16.04:
# # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu1604-README.md
# gcc: 5.5.0, 7.5.0, 8.4.0, 9.3.0
# clang: 6.0.0, 8.0.0, 9.0.1
# macos-11.0: macOS Big Sur 11.0
# # https://github.com/actions/virtual-environments/blob/main/images/macos/macos-11.0-Readme.md
# Xcode 12.1 11.7
# clang/LLVM 10.0.1
# gcc-8 gcc-9
# macos-10.15: macOS Catalina 10.15
# # https://github.com/actions/virtual-environments/blob/main/images/macos/macos-10.15-Readme.md
# Xcode 12.1 11.7
# clang/LLVM 11.0.0
# gcc-8 gcc-9
# windows-2019:
# # https://github.com/actions/virtual-environments/blob/main/images/win/Windows2019-Readme.md
# vs2019
# windows-2016:
# # https://github.com/actions/virtual-environments/blob/main/images/win/Windows2019-Readme.md
# vs2017
jobs:
#----------------------------------------------------------------------------
test_coverage:
# if: github.ref == 'refs/heads/master'
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 11, cxx: g++-7, bt: Coverage, os: ubuntu-18.04}
- {std: 14, cxx: g++-7, bt: Coverage, os: ubuntu-18.04}
- {std: 17, cxx: g++-7, bt: Coverage, os: ubuntu-18.04}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}", CODECOV_TOKEN: "${{secrets.CODECOV_TOKEN}}", COVERALLS_REPO_TOKEN: "${{secrets.COVERALLS_REPO_TOKEN}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- name: shared64-submit
run: |
source .github/setenv.sh
c4_submit_coverage shared64 codecov
#c4_submit_coverage shared64 coveralls # only accepts one submission per job
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- name: static64-submit
run: |
source .github/setenv.sh
c4_submit_coverage static64 codecov
c4_submit_coverage static64 coveralls
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- name: static32-submit
run: |
source .github/setenv.sh
c4_submit_coverage static32 codecov
#c4_submit_coverage static32 coveralls # only accepts one submission per job
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- name: shared32-submit
run: |
source .github/setenv.sh
c4_submit_coverage shared32 codecov
#c4_submit_coverage shared32 coveralls # only accepts one submission per job
#----------------------------------------------------------------------------
test_windows:
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 11, cxx: vs2017, bt: Debug , os: windows-2016, bitlinks: shared64 static32}
- {std: 11, cxx: vs2017, bt: Release, os: windows-2016, bitlinks: shared64 static32}
- {std: 14, cxx: vs2017, bt: Debug , os: windows-2016, bitlinks: shared64 static32}
- {std: 14, cxx: vs2017, bt: Release, os: windows-2016, bitlinks: shared64 static32}
- {std: 11, cxx: vs2019, bt: Debug , os: windows-2019, bitlinks: shared64 static32}
- {std: 11, cxx: vs2019, bt: Release, os: windows-2019, bitlinks: shared64 static32}
- {std: 14, cxx: vs2019, bt: Debug , os: windows-2019, bitlinks: shared64 static32}
- {std: 14, cxx: vs2019, bt: Release, os: windows-2019, bitlinks: shared64 static32}
- {std: 17, cxx: vs2019, bt: Debug , os: windows-2019, bitlinks: shared64 static32}
- {std: 17, cxx: vs2019, bt: Release, os: windows-2019, bitlinks: shared64 static32}
- {std: 20, cxx: vs2019, bt: Debug , os: windows-2019, bitlinks: shared64 static32}
- {std: 20, cxx: vs2019, bt: Release, os: windows-2019, bitlinks: shared64 static32}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
#----------------------------------------------------------------------------
test_macosx:
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 11, cxx: xcode, bt: Debug , os: macos-11.0, bitlinks: shared64 static64}
- {std: 11, cxx: xcode, bt: Release, os: macos-11.0, bitlinks: shared64 static64}
- {std: 14, cxx: xcode, bt: Debug , os: macos-11.0, bitlinks: shared64 static64}
- {std: 14, cxx: xcode, bt: Release, os: macos-11.0, bitlinks: shared64 static64}
- {std: 17, cxx: xcode, bt: Debug , os: macos-11.0, bitlinks: shared64 static64}
- {std: 17, cxx: xcode, bt: Release, os: macos-11.0, bitlinks: shared64 static64}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
#----------------------------------------------------------------------------
test_gcc_canary:
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 11, cxx: g++-7 , bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: g++-7 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 20, cxx: g++-10 , bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 20, cxx: g++-10 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: g++-5 , bt: Debug , os: ubuntu-16.04, bitlinks: shared64 static32}
- {std: 11, cxx: g++-5 , bt: Release, os: ubuntu-16.04, bitlinks: shared64 static32}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
test_clang_canary:
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 20, cxx: clang++-10 , bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 20, cxx: clang++-10 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: clang++-6.0, bt: Debug , os: ubuntu-16.04, bitlinks: shared64 static32}
- {std: 11, cxx: clang++-6.0, bt: Release, os: ubuntu-16.04, bitlinks: shared64 static32}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
test_clang_tidy:
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
# clang tidy takes a long time, so don't do multiple bits/linktypes
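# (each bitlinks entry presumably names one build flavor: bit width x link type, eg shared64 = 64-bit shared build)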
- {std: 11, cxx: clang++-9, bt: Debug , lint: clang-tidy, bitlinks: shared64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: Debug , lint: clang-tidy, bitlinks: shared32, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: Debug , lint: clang-tidy, bitlinks: static64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: Debug , lint: clang-tidy, bitlinks: static32, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: RelWithDebInfo, lint: clang-tidy, bitlinks: shared64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: RelWithDebInfo, lint: clang-tidy, bitlinks: shared32, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: RelWithDebInfo, lint: clang-tidy, bitlinks: static64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: RelWithDebInfo, lint: clang-tidy, bitlinks: static32, os: ubuntu-18.04}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
test_gcc_extended:
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
# VALGRIND
- {std: 11, cxx: g++-10, bt: Debug , vg: ON, os: ubuntu-18.04}
- {std: 11, cxx: g++-10, bt: Release, vg: ON, os: ubuntu-18.04}
- {std: 14, cxx: g++-10, bt: Debug , vg: ON, os: ubuntu-18.04}
- {std: 14, cxx: g++-10, bt: Release, vg: ON, os: ubuntu-18.04}
- {std: 17, cxx: g++-10, bt: Debug , vg: ON, os: ubuntu-18.04}
- {std: 17, cxx: g++-10, bt: Release, vg: ON, os: ubuntu-18.04}
- {std: 20, cxx: g++-10, bt: Debug , vg: ON, os: ubuntu-18.04}
- {std: 20, cxx: g++-10, bt: Release, vg: ON, os: ubuntu-18.04}
#
- {std: 11, cxx: g++-9, bt: Debug , os: ubuntu-18.04}
- {std: 11, cxx: g++-9, bt: Release, os: ubuntu-18.04}
- {std: 11, cxx: g++-8, bt: Debug , os: ubuntu-18.04}
- {std: 11, cxx: g++-8, bt: Release, os: ubuntu-18.04}
- {std: 11, cxx: g++-7, bt: Debug , os: ubuntu-18.04}
- {std: 11, cxx: g++-7, bt: Release, os: ubuntu-18.04}
- {std: 11, cxx: g++-6, bt: Debug , os: ubuntu-18.04}
- {std: 11, cxx: g++-6, bt: Release, os: ubuntu-18.04}
- {std: 11, cxx: g++-5, bt: Debug , os: ubuntu-18.04}
- {std: 11, cxx: g++-5, bt: Release, os: ubuntu-18.04}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
test_clang_extended:
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 20, cxx: clang++-10 , bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 20, cxx: clang++-10 , bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9 , bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9 , bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-8 , bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-8 , bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-7 , bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-7 , bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-6.0, bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-6.0, bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-5.0, bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-5.0, bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-4.0, bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-4.0, bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-3.9, bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-3.9, bt: Release, vg: on, os: ubuntu-18.04}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
test_clang_sanitize:
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
# these jobs take much longer, so run only one bitlink pair per job to profit from parallelism
- {std: 11, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 11, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 14, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 14, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 14, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 14, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 17, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 17, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 17, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 17, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 20, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 20, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 20, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 20, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
test_arm:
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
# cross-compile for arm via a CMake toolchain file; these jobs don't use the bitlinks matrix
- {std: 11, bt: Debug , toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
- {std: 11, bt: Release, toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
- {std: 14, bt: Debug , toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
- {std: 14, bt: Release, toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
- {std: 17, bt: Debug , toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
- {std: 17, bt: Release, toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
env: {TOOLCHAIN: "${{matrix.toolchain}}", STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test arm
- {name: build, run: source .github/setenv.sh && c4_build_test arm}
- {name: run, run: source .github/setenv.sh && c4_run_test arm}
- {name: pack, run: source .github/setenv.sh && c4_package arm}
# #----------------------------------------------------------------------------
# # https://blog.kitware.com/static-checks-with-cmake-cdash-iwyu-clang-tidy-lwyu-cpplint-and-cppcheck/
# static_analysis:
# continue-on-error: true
# if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
# runs-on: ${{matrix.os}}
# strategy:
# fail-fast: false
# matrix:
# include:
# # these jobs take much longer, so run only one bitlink pair per job to profit from parallelism
# - {std: 11, cxx: clang++-10, bt: Debug , bitlinks: shared64, os: ubuntu-18.04}
# - {std: 11, cxx: clang++-10, bt: Release, bitlinks: shared64, os: ubuntu-18.04}
# - {std: 14, cxx: clang++-10, bt: Debug , bitlinks: shared64, os: ubuntu-18.04}
# - {std: 14, cxx: clang++-10, bt: Release, bitlinks: shared64, os: ubuntu-18.04}
# - {std: 17, cxx: clang++-10, bt: Debug , bitlinks: shared64, os: ubuntu-18.04}
# - {std: 17, cxx: clang++-10, bt: Release, bitlinks: shared64, os: ubuntu-18.04}
# - {std: 20, cxx: clang++-10, bt: Debug , bitlinks: shared64, os: ubuntu-18.04}
# - {std: 20, cxx: clang++-10, bt: Release, bitlinks: shared64, os: ubuntu-18.04}
# env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
# steps:
# - {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
# - {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
# - {name: show info, run: source .github/setenv.sh && c4_show_info}
# - name: shared64-configure---------------------------------------------------
# run: source .github/setenv.sh && c4_cfg_test shared64
# - {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
# - {name: clang-tidy, run: cmake "-DCMAKE_CXX_CLANG_TIDY=/usr/bin/clang-tidy-3.9;-checks=*" ../path/to/source}
# - {name: cppcheck, run: cmake "-DCMAKE_CXX_CPPCHECK=/usr/bin/cppcheck;--std=c++11" ../path/to/source}
# - {name: cpplint, run: cmake "-DCMAKE_CXX_CPPLINT=/usr/local/bin/cpplint;--linelength=179" ..}
# - {name: include-what-you-use, run: cmake "-DCMAKE_CXX_INCLUDE_WHAT_YOU_USE=/usr/bin/iwyu;--transitive_includes_only" ..}
# - {name: link-what-you-use, run: cmake -DCMAKE_LINK_WHAT_YOU_USE=TRUE ..}
#----------------------------------------------------------------------------
# useful to iterate when fixing the release
# ver=0.0.0-rc1 ; ( set -x ; git tag -d v$ver ; git push origin :v$ver ) ; (set -x ; set -e ; git add -u ; git commit --amend --no-edit ; git tag --annotate --message "v$ver" "v$ver" ; git push -f --tags origin gh_actions )
release:
if: contains(github.ref, 'tags/v')
runs-on: ubuntu-latest
#needs: [test_coverage, test_windows, test_macosx, test_gcc_canary, test_clang_canary, test_clang_tidy, test_gcc_extended, test_clang_extended, test_clang_sanitize, test_api]
steps:
- name: Install requirements
run: |
sudo -E pip install git-archive-all
- name: Get version
id: get_version
# https://github.community/t/how-to-get-just-the-tag-name/16241/11
run: |
echo ::set-output name=SRC_TAG::${GITHUB_REF#refs/tags/}
echo ::set-output name=SRC_VERSION::${GITHUB_REF#refs/tags/v}
echo SRC_TAG=${GITHUB_REF#refs/tags/}
echo SRC_VERSION=${GITHUB_REF#refs/tags/v}
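# illustrative example (hypothetical tag): for GITHUB_REF=refs/tags/v0.5.0 the parameter
# expansions above yield SRC_TAG=v0.5.0 and SRC_VERSION=0.5.0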
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- name: Create Release
id: create_release
uses: actions/create-release@v1 # https://github.com/marketplace/actions/create-a-release
env:
GITHUB_TOKEN: "${{secrets.GITHUB_TOKEN}}"
SRC_TAG: "${{steps.get_version.outputs.SRC_TAG}}"
SRC_VERSION: "${{steps.get_version.outputs.SRC_VERSION}}"
with:
tag_name: ${{github.ref}}
release_name: Release ${{steps.get_version.outputs.SRC_VERSION}}
draft: true # to create a draft (unpublished) release, false to create a published one. Default: false
prerelease: ${{contains(github.ref, '-rc')}}
body_path: ${{github.workspace}}/changelog/${{steps.get_version.outputs.SRC_VERSION}}.md
- name: Create source packs
id: src_pack
run: |
version=${{steps.get_version.outputs.SRC_VERSION}}
name=${PROJ_PFX_TARGET}src-$version
git-archive-all --prefix $name $name.tgz
git-archive-all --prefix $name $name.zip
echo ::set-output name=TGZ::$name.tgz
echo ::set-output name=ZIP::$name.zip
- name: Upload tgz source pack
id: upload_src_tgz_to_release
uses: actions/upload-release-asset@v1.0.1
env: {GITHUB_TOKEN: "${{secrets.GITHUB_TOKEN}}"}
with:
upload_url: ${{steps.create_release.outputs.upload_url}}
asset_path: ${{steps.src_pack.outputs.TGZ}}
asset_name: ${{steps.src_pack.outputs.TGZ}}
asset_content_type: application/gzip
- name: Upload zip source pack
id: upload_src_zip_to_release
uses: actions/upload-release-asset@v1.0.1
env: {GITHUB_TOKEN: "${{secrets.GITHUB_TOKEN}}"}
with:
upload_url: ${{steps.create_release.outputs.upload_url}}
asset_path: ${{steps.src_pack.outputs.ZIP}}
asset_name: ${{steps.src_pack.outputs.ZIP}}
asset_content_type: application/zip
- name: Save Release URL for uploading binary artifacts
run: |
echo "UPLOAD_URL: ${{steps.create_release.outputs.upload_url}}"
echo "${{steps.create_release.outputs.upload_url}}" > ./upload_url
- name: Upload Release URL
uses: actions/upload-artifact@v1
with:
path: ./upload_url
name: upload_url
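# the upload_url artifact lets later jobs (eg the commented publish job below) attach
# binary assets to this release without recreating it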
# since this is a library, we just provide the source packages (done above)
# #----------------------------------------------------------------------------
# publish:
# needs: release
# name: publish/${{matrix.config.os}}/${{matrix.config.gen}}
# runs-on: ${{matrix.config.os}}
# env: {DEV: OFF, BT: Release, OS: "${{matrix.config.os}}", CXX_: "${{matrix.config.cxx}}", GEN: "${{matrix.config.gen}}"}
# strategy:
# fail-fast: false
# matrix:
# config:
# # name of the artifact | suffix | cpack gen | mime type | os | cxx
# - {name: Ubuntu 20.04 deb , sfx: unix64.deb, gen: DEB , mime: vnd.debian.binary-package, os: ubuntu-20.04 }
# - {name: Ubuntu 20.04 sh , sfx: unix64.sh , gen: STGZ , mime: x-sh , os: ubuntu-20.04 }
# - {name: Ubuntu 18.04 deb , sfx: unix64.deb, gen: DEB , mime: vnd.debian.binary-package, os: ubuntu-18.04 }
# - {name: Ubuntu 18.04 sh , sfx: unix64.sh , gen: STGZ , mime: x-sh , os: ubuntu-18.04 }
# - {name: Ubuntu 16.04 deb , sfx: unix64.deb, gen: DEB , mime: vnd.debian.binary-package, os: ubuntu-16.04 }
# - {name: Ubuntu 16.04 sh , sfx: unix64.sh , gen: STGZ , mime: x-sh , os: ubuntu-16.04 }
# - {name: Windows VS2017 zip, sfx: win64.zip , gen: ZIP , mime: zip , os: windows-2016, cxx: vs2017}
# - {name: Windows VS2019 zip, sfx: win64.zip , gen: ZIP , mime: zip , os: windows-2019, cxx: vs2019}
# - {name: MacOSX sh , sfx: apple64.sh, gen: STGZ , mime: x-sh , os: macos-11.0 , cxx: xcode }
# steps:
# - name: Get version
# id: get_version
# # https://github.community/t/how-to-get-just-the-tag-name/16241/11
# run: |
# echo ::set-output name=SRC_VERSION::${GITHUB_REF#refs/tags/v}
# echo SRC_VERSION=${GITHUB_REF#refs/tags/v}
# echo GEN=$GEN
# - name: Download upload URL
# uses: actions/download-artifact@v1
# with: {name: upload_url, path: ./}
# - name: Preprocess
# id: preprocess
# run: |
# upload_url=`cat ./upload_url`
# echo ::set-output name=upload_url::$upload_url
# # the package has the same name in multiple same-platform+same-sfx
# # instances, but the uploaded asset needs to have different names:
# sfx=${{matrix.config.sfx}}
# case "${{matrix.config.os}}" in
# ubuntu*)
# sfx=$(echo $sfx | sed "s:unix64:${{matrix.config.os}}:")
# ;;
# windows*)
# sfx=$(echo $sfx | sed "s:win64:win64-${{matrix.config.cxx}}:")
# ;;
# macos*)
# sfx=$(echo $sfx | sed "s:apple64:macosx-${{matrix.config.cxx}}:")
# ;;
# esac
# asset_name=${PROJ_PFX_TARGET}${{steps.get_version.outputs.SRC_VERSION}}-$sfx
# echo ::set-output name=asset_name::$asset_name
# - {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
# - {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
# - {name: show info, run: source .github/setenv.sh && c4_show_info }
# - name: shared64-configure---------------------------------------------------
# run: source .github/setenv.sh && c4_cfg_test shared64
# - {name: shared64-build, run: source .github/setenv.sh && c4_build_target shared64 all}
# - name: shared64-pack
# run: |
# source .github/setenv.sh && c4_package shared64 $GEN
# src=./build/shared64/${PROJ_PFX_TARGET}${{steps.get_version.outputs.SRC_VERSION}}-${{matrix.config.sfx}}
# dst=${{steps.preprocess.outputs.asset_name}}
# cp -fav $src $dst
# - name: Upload artifact
# id: upload_to_release
# uses: actions/upload-release-asset@v1.0.1
# env: {GITHUB_TOKEN: "${{secrets.GITHUB_TOKEN}}"}
# with:
# upload_url: ${{steps.preprocess.outputs.upload_url}}
# asset_path: ${{steps.preprocess.outputs.asset_name}}
# asset_name: ${{steps.preprocess.outputs.asset_name}}
# asset_content_type: application/${{matrix.config.mime}}
# #- name: Report artifact URL
# # run: echo "artifact uploaded successfully: ${{steps.upload_to_release.outputs.browser_download_url}}"


@@ -0,0 +1,95 @@
name: emscripten
defaults:
#if: "!contains(github.event.head_commit.message, 'skip ci')" # SKIP
run:
# Use a bash shell so we can use the same syntax for environment variable
# access regardless of the host operating system
shell: bash -e -x {0}
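# -e aborts the step on the first failing command, -x echoes each command as it runs,
# and {0} is the placeholder that GitHub Actions replaces with the generated script path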
on:
# https://github.community/t/how-to-trigger-an-action-on-push-or-pull-request-but-not-both/16662
workflow_dispatch:
push:
branches:
- master
pull_request:
branches:
- master
env:
PROJ_PFX_TARGET: c4core-
PROJ_PFX_CMAKE: C4CORE_
CMAKE_FLAGS:
NUM_JOBS_BUILD: # 4
EMSCRIPTEN_CACHE_FOLDER: 'emsdk-cache'
# ubuntu-20.04:
# # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-README.md
# gcc: 7.5.0, 8.4.0, 9.3.0, 10.2.0
# clang: 8.0.1, 9.0.1, 10.0.0
# ubuntu-18.04:
# # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu1804-README.md
# gcc: 7.5.0, 8.4.0, 9.3.0, 10.1.0
# clang: 6.0.0, 8.0.0, 9.0.0
# macos-11.0: macOS Big Sur 11.0
# # https://github.com/actions/virtual-environments/blob/main/images/macos/macos-11.0-Readme.md
# Xcode 12.1 11.7
# clang/LLVM 10.0.1
# gcc-8 gcc-9
# macos-10.15: macOS Catalina 10.15
# # https://github.com/actions/virtual-environments/blob/main/images/macos/macos-10.15-Readme.md
# Xcode 12.1 11.7
# clang/LLVM 11.0.0
# gcc-8 gcc-9
# windows-2019:
# # https://github.com/actions/virtual-environments/blob/main/images/win/Windows2019-Readme.md
# vs2019
# windows-2016:
# # https://github.com/actions/virtual-environments/blob/main/images/win/Windows2016-Readme.md
# vs2017
jobs:
#----------------------------------------------------------------------------
emscripten:
name: emscripten/${{matrix.emver}}/c++${{matrix.std}}/${{matrix.bt}}
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 11, cxx: em++, emver: 2.0.34, bt: Debug , os: ubuntu-latest, bitlinks: static32}
- {std: 11, cxx: em++, emver: 2.0.34, bt: Release, os: ubuntu-latest, bitlinks: static32}
- {std: 20, cxx: em++, emver: 2.0.34, bt: Debug , os: ubuntu-latest, bitlinks: static32}
- {std: 20, cxx: em++, emver: 2.0.34, bt: Release, os: ubuntu-latest, bitlinks: static32}
- {std: 11, cxx: em++, emver: 3.0.0 , bt: Debug , os: ubuntu-latest, bitlinks: static32}
- {std: 11, cxx: em++, emver: 3.0.0 , bt: Release, os: ubuntu-latest, bitlinks: static32}
- {std: 20, cxx: em++, emver: 3.0.0 , bt: Debug , os: ubuntu-latest, bitlinks: static32}
- {std: 20, cxx: em++, emver: 3.0.0 , bt: Release, os: ubuntu-latest, bitlinks: static32}
env:
STD: "${{matrix.std}}"
CXX_: "${{matrix.cxx}}"
BT: "${{matrix.bt}}"
BITLINKS: "${{matrix.bitlinks}}"
VG: "${{matrix.vg}}"
SAN: "${{matrix.san}}"
LINT: "${{matrix.lint}}"
OS: "${{matrix.os}}"
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- name: setup emscripten cache
id: cache-system-libraries
uses: actions/cache@v2
with: {path: "${{env.EMSCRIPTEN_CACHE_FOLDER}}", key: "${{matrix.emver}}-${{runner.os}}"}
- name: setup emscripten
uses: mymindstorm/setup-emsdk@v11
with: {version: "${{matrix.emver}}", actions-cache-folder: "${{env.EMSCRIPTEN_CACHE_FOLDER}}"}
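# the emsdk cache folder is restored via actions/cache keyed on emscripten version + runner OS,
# so setup-emsdk presumably only rebuilds the emscripten system libraries on a cache miss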
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}


@@ -0,0 +1,111 @@
name: libcxx
defaults:
#if: "!contains(github.event.head_commit.message, 'skip ci')" # SKIP
run:
# Use a bash shell so we can use the same syntax for environment variable
# access regardless of the host operating system
shell: bash -e -x {0}
on:
# https://github.community/t/how-to-trigger-an-action-on-push-or-pull-request-but-not-both/16662
workflow_dispatch:
push:
branches:
- master
pull_request:
branches:
- master
env:
PROJ_PFX_TARGET: c4core-
PROJ_PFX_CMAKE: C4CORE_
CMAKE_FLAGS:
NUM_JOBS_BUILD: # 4
# ubuntu-20.04:
# # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-README.md
# gcc: 7.5.0, 8.4.0, 9.3.0, 10.2.0
# clang: 8.0.1, 9.0.1, 10.0.0
# ubuntu-18.04:
# # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu1804-README.md
# gcc: 7.5.0, 8.4.0, 9.3.0, 10.1.0
# clang: 6.0.0, 8.0.0, 9.0.0
# macos-11.0: macOS Big Sur 11.0
# # https://github.com/actions/virtual-environments/blob/main/images/macos/macos-11.0-Readme.md
# Xcode 12.1 11.7
# clang/LLVM 10.0.1
# gcc-8 gcc-9
# macos-10.15: macOS Catalina 10.15
# # https://github.com/actions/virtual-environments/blob/main/images/macos/macos-10.15-Readme.md
# Xcode 12.1 11.7
# clang/LLVM 11.0.0
# gcc-8 gcc-9
# windows-2019:
# # https://github.com/actions/virtual-environments/blob/main/images/win/Windows2019-Readme.md
# vs2019
# windows-2016:
# # https://github.com/actions/virtual-environments/blob/main/images/win/Windows2016-Readme.md
# vs2017
jobs:
#----------------------------------------------------------------------------
libcxx:
name: libc++/${{matrix.cxx}}/c++${{matrix.std}}/${{matrix.bt}}
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 20, cxx: clang++-10 , bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 20, cxx: clang++-10 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 17, cxx: clang++-10 , bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 17, cxx: clang++-10 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 14, cxx: clang++-10 , bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 14, cxx: clang++-10 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: clang++-10 , bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: clang++-10 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 17, cxx: clang++-6.0, bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 17, cxx: clang++-6.0, bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 14, cxx: clang++-6.0, bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 14, cxx: clang++-6.0, bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: clang++-6.0, bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: clang++-6.0, bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
env:
LIBCXX: ON # <---- enable libc++
STD: "${{matrix.std}}"
CXX_: "${{matrix.cxx}}"
BT: "${{matrix.bt}}"
BITLINKS: "${{matrix.bitlinks}}"
VG: "${{matrix.vg}}"
SAN: "${{matrix.san}}"
LINT: "${{matrix.lint}}"
OS: "${{matrix.os}}"
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}


@@ -0,0 +1,103 @@
name: macosx
defaults:
#if: "!contains(github.event.head_commit.message, 'skip ci')" # SKIP
run:
# Use a bash shell so we can use the same syntax for environment variable
# access regardless of the host operating system
shell: bash -e -x {0}
on:
# https://github.community/t/how-to-trigger-an-action-on-push-or-pull-request-but-not-both/16662
workflow_dispatch:
push:
branches:
- master
pull_request:
branches:
- master
env:
PROJ_PFX_TARGET: c4core-
PROJ_PFX_CMAKE: C4CORE_
CMAKE_FLAGS:
NUM_JOBS_BUILD: # 4
# ubuntu-20.04:
# # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-README.md
# gcc: 7.5.0, 8.4.0, 9.3.0, 10.2.0
# clang: 8.0.1, 9.0.1, 10.0.0
# ubuntu-18.04:
# # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu1804-README.md
# gcc: 7.5.0, 8.4.0, 9.3.0, 10.1.0
# clang: 6.0.0, 8.0.0, 9.0.0
# macos-11.0: macOS Big Sur 11.0
# # https://github.com/actions/virtual-environments/blob/main/images/macos/macos-11.0-Readme.md
# Xcode 12.1 11.7
# clang/LLVM 10.0.1
# gcc-8 gcc-9
# macos-10.15: macOS Catalina 10.15
# # https://github.com/actions/virtual-environments/blob/main/images/macos/macos-10.15-Readme.md
# Xcode 12.1 11.7
# clang/LLVM 11.0.0
# gcc-8 gcc-9
# windows-2019:
# # https://github.com/actions/virtual-environments/blob/main/images/win/Windows2019-Readme.md
# vs2019
# windows-2016:
# # https://github.com/actions/virtual-environments/blob/main/images/win/Windows2016-Readme.md
# vs2017
jobs:
#----------------------------------------------------------------------------
xcode:
name: xcode${{matrix.xcver}}/c++${{matrix.std}}/${{matrix.bt}}
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 11, cxx: xcode, xcver: 13, bt: Debug , os: macos-11, bitlinks: shared64 static64}
- {std: 11, cxx: xcode, xcver: 13, bt: Release, os: macos-11, bitlinks: shared64 static64}
- {std: 17, cxx: xcode, xcver: 13, bt: Debug , os: macos-11, bitlinks: shared64 static64}
- {std: 17, cxx: xcode, xcver: 13, bt: Release, os: macos-11, bitlinks: shared64 static64}
#
- {std: 11, cxx: xcode, xcver: 12, bt: Debug , os: macos-11, bitlinks: shared64 static64}
- {std: 11, cxx: xcode, xcver: 12, bt: Release, os: macos-11, bitlinks: shared64 static64}
- {std: 17, cxx: xcode, xcver: 12, bt: Debug , os: macos-11, bitlinks: shared64 static64}
- {std: 17, cxx: xcode, xcver: 12, bt: Release, os: macos-11, bitlinks: shared64 static64}
#
- {std: 11, cxx: xcode, xcver: 11, bt: Debug , os: macos-11, bitlinks: shared64 static64}
- {std: 11, cxx: xcode, xcver: 11, bt: Release, os: macos-11, bitlinks: shared64 static64}
- {std: 17, cxx: xcode, xcver: 11, bt: Debug , os: macos-11, bitlinks: shared64 static64}
- {std: 17, cxx: xcode, xcver: 11, bt: Release, os: macos-11, bitlinks: shared64 static64}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: xcode, uses: maxim-lobanov/setup-xcode@v1, with: {xcode-version: "${{matrix.xcver}}" }}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}


@@ -0,0 +1,199 @@
name: release
defaults:
#if: "!contains(github.event.head_commit.message, 'skip ci')" # SKIP
run:
# Use a bash shell so we can use the same syntax for environment variable
# access regardless of the host operating system
shell: bash -e -x {0}
on:
# https://github.community/t/how-to-trigger-an-action-on-push-or-pull-request-but-not-both/16662
workflow_dispatch:
push:
tags:
- v0.*
- v1.*
- v2.*
branches:
- master
pull_request:
branches:
- master
env:
PROJ_PKG_NAME: c4core-
PROJ_PFX_TARGET: c4core-
PROJ_PFX_CMAKE: C4CORE_
CMAKE_FLAGS:
NUM_JOBS_BUILD: # 4
# useful to iterate when fixing the release:
# ver=0.2.1 ; ( set -x ; git tag -d v$ver ; git push origin :v$ver ) ; (set -x ; set -e ; tbump --only-patch --non-interactive $ver ; git add -u ; git commit --amend --no-edit ; git tag --annotate --message "v$ver" "v$ver" ; git push -f --tags origin )
jobs:
gettag:
runs-on: ubuntu-latest
steps:
# use fetch-depth to ensure all tags are fetched
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive, fetch-depth: 0}}
- name: Variables (from tag)
if: contains(github.ref, 'tags/v')
run: |
# https://github.community/t/how-to-get-just-the-tag-name/16241/11
SRC_TAG=${GITHUB_REF#refs/tags/}
SRC_VERSION=${GITHUB_REF#refs/tags/v}
cat <<EOF > vars.sh
export SRC_TAG=$SRC_TAG
export SRC_VERSION=$SRC_VERSION
EOF
- name: Variables (from commit, no tag)
if: ${{ !contains(github.ref, 'tags/v') }}
run: |
set -x
branch_name=${GITHUB_REF#refs/heads/}
# builds triggered from PRs have the branch_name like this: refs/pull/150/merge
# so filter to eg pr0150_merge
branch_name=`echo $branch_name | sed "s:refs/pull/\([0-9]*\)/\(.*\):pr0\1_\2:"`
# sanitize the branch name; eg merge/foo-bar -> merge_foo_bar
branch_name=`echo $branch_name | sed 's:[/.-]:_:g'`
SRC_TAG=$(git describe || git rev-parse --short HEAD) # eg v0.2.0-110-gda837e0
SRC_VERSION="${branch_name}-${SRC_TAG}"
cat <<EOF > vars.sh
export SRC_TAG=$SRC_TAG
export SRC_VERSION=$SRC_VERSION
EOF
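# illustrative example: for a PR ref like refs/pull/150/merge the sed above yields
# branch_name=pr0150_merge; combined with a (hypothetical) git describe output of
# v0.2.0-110-gda837e0 this gives SRC_VERSION=pr0150_merge-v0.2.0-110-gda837e0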
- name: Verify vars.sh
run: cat vars.sh ; source vars.sh ; echo $SRC_TAG ; echo $SRC_VERSION
- name: Save vars.sh
uses: actions/upload-artifact@v1
with: {name: vars.sh, path: ./vars.sh}
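# vars.sh is the hand-off between jobs: gettag computes SRC_TAG/SRC_VERSION once, and the
# src, cpp and release jobs download this artifact and source it instead of recomputing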
#----------------------------------------------------------------------------
# create source packages
src:
needs: gettag
runs-on: ubuntu-latest
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- name: Download vars.sh
uses: actions/download-artifact@v1
with: {name: vars.sh, path: ./}
- name: Install python 3.9
uses: actions/setup-python@v2
with: { python-version: 3.9 }
- name: Install requirements
run: |
sudo -E pip install git-archive-all
- name: Create source packages
run: |
pwd
ls -lFhp
source vars.sh
echo SRC_TAG=$SRC_TAG
echo SRC_VERSION=$SRC_VERSION
id=${PROJ_PKG_NAME}${SRC_VERSION}
name=${id}-src
mkdir -p assets
git-archive-all --prefix $name assets/$name.tgz
git-archive-all --prefix $name assets/$name.zip
python --version
python tools/amalgamate.py assets/$id.hpp
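# git-archive-all also packs the submodules into the tgz/zip archives; tools/amalgamate.py
# presumably writes the single-header (amalgamated) version of the library into assets/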
- name: Save source artifacts
uses: actions/upload-artifact@v1
with: {name: assets, path: assets}
#----------------------------------------------------------------------------
# create c++ packages
cpp:
name: cpp/${{matrix.config.os}}/${{matrix.config.gen}}
needs: gettag
runs-on: ${{matrix.config.os}}
env: {DEV: OFF, BT: Release, OS: "${{matrix.config.os}}", CXX_: "${{matrix.config.cxx}}", GEN: "${{matrix.config.gen}}"}
strategy:
fail-fast: false
matrix:
config:
# name of the artifact | suffix (gen) | suffix (package) | cpack gen | mime type | os | cxx
# ubuntu 20.04 is disabled because of a problem installing libc++:i386:
#- {name: Ubuntu 20.04 deb , sfxg: unix64-shared-Release.deb, sfxp: ubuntu-20.04.deb , gen: DEB , mime: vnd.debian.binary-package, os: ubuntu-20.04 }
- {name: Ubuntu 18.04 deb , sfxg: unix64-shared-Release.deb, sfxp: ubuntu-18.04.deb , gen: DEB , mime: vnd.debian.binary-package, os: ubuntu-18.04 }
- {name: Windows VS2019 zip, sfxg: win64-shared-Release.zip , sfxp: windows-vs2019.zip , gen: ZIP , mime: zip , os: windows-2019, cxx: vs2019}
- {name: MacOSX sh , sfxg: apple64-shared-Release.sh, sfxp: macosx-xcode.sh , gen: STGZ , mime: x-sh , os: macos-11.0 , cxx: xcode }
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- name: Download vars.sh
uses: actions/download-artifact@v1
with: {name: vars.sh, path: ./}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info }
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_target shared64}
- name: shared64-pack
run: source .github/setenv.sh && c4_package shared64 $GEN
- name: shared64-normalize
run: |
set -x
source vars.sh
mkdir -p assets
asset_src=`ls -1 ./build/shared64/${PROJ_PFX_TARGET}*-${{matrix.config.sfxg}}`
asset_dst=./assets/${PROJ_PKG_NAME}${SRC_VERSION}-${{matrix.config.sfxp}}
[ ! -f $asset_src ] && exit 1
cp -fav $asset_src $asset_dst
- name: Save artifacts
uses: actions/upload-artifact@v1
with: {name: assets, path: assets}
#----------------------------------------------------------------------------
release:
runs-on: ubuntu-latest
needs:
- src
- cpp
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- name: Gather artifacts - ./assets
uses: actions/download-artifact@v2
with: {name: assets, path: assets}
- name: Verify existing artifacts
run: |
ls -lFhp assets/
#
# Github
- name: Restore vars.sh
if: contains(github.ref, 'tags/v')
uses: actions/download-artifact@v1
with: {name: vars.sh, path: ./}
- name: Save vars for following steps
if: contains(github.ref, 'tags/v')
id: vars
run: |
source vars.sh
version_body=${{github.workspace}}/changelog/$SRC_VERSION.md
if [ ! -f $version_body ] ; then
echo "version body file was not found: $version_body"
exit 1
fi
echo "::set-output name=VERSION::$SRC_VERSION"
echo "::set-output name=VERSION_BODY::$version_body"
- name: Create Github Release
if: contains(github.ref, 'tags/v')
id: create_release
uses: actions/create-release@v1
env: { GITHUB_TOKEN: "${{secrets.GITHUB_TOKEN}}" }
with:
tag_name: ${{github.ref}}
release_name: Release ${{steps.vars.outputs.VERSION}}
body_path: ${{steps.vars.outputs.VERSION_BODY}}
draft: true
prerelease: ${{contains(github.ref, 'rc')}}
- name: Upload assets to Github Release
if: contains(github.ref, 'tags/v')
uses: dwenegar/upload-release-assets@v1
env: { GITHUB_TOKEN: "${{secrets.GITHUB_TOKEN}}" }
with:
release_id: ${{steps.create_release.outputs.id}}
assets_path: ./assets/


@@ -0,0 +1,576 @@
name: test
defaults:
#if: "!contains(github.event.head_commit.message, 'skip ci')" # SKIP
run:
# Use a bash shell so we can use the same syntax for environment variable
# access regardless of the host operating system
shell: bash -e -x {0}
on:
# https://github.community/t/how-to-trigger-an-action-on-push-or-pull-request-but-not-both/16662
workflow_dispatch:
push:
branches:
- master
pull_request:
branches:
- master
env:
PROJ_PFX_TARGET: c4core-
PROJ_PFX_CMAKE: C4CORE_
CMAKE_FLAGS:
NUM_JOBS_BUILD: # 4
# ubuntu-20.04:
# # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu2004-README.md
# gcc: 7.5.0, 8.4.0, 9.3.0, 10.2.0
# clang: 8.0.1, 9.0.1, 10.0.0
# ubuntu-18.04:
# # https://github.com/actions/virtual-environments/blob/main/images/linux/Ubuntu1804-README.md
# gcc: 7.5.0, 8.4.0, 9.3.0, 10.1.0
# clang: 6.0.0, 8.0.0, 9.0.0
# macos-11.0: macOS Big Sur 11.0
# # https://github.com/actions/virtual-environments/blob/main/images/macos/macos-11.0-Readme.md
# Xcode 12.1 11.7
# clang/LLVM 10.0.1
# gcc-8 gcc-9
# macos-10.15: macOS Catalina 10.15
# # https://github.com/actions/virtual-environments/blob/main/images/macos/macos-10.15-Readme.md
# Xcode 12.1 11.7
# clang/LLVM 11.0.0
# gcc-8 gcc-9
# windows-2019:
# # https://github.com/actions/virtual-environments/blob/main/images/win/Windows2019-Readme.md
# vs2019
# windows-2016:
# # https://github.com/actions/virtual-environments/blob/main/images/win/Windows2016-Readme.md
# vs2017
jobs:
#----------------------------------------------------------------------------
coverage:
name: coverage/c++${{matrix.std}}
# if: github.ref == 'refs/heads/master'
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 11, cxx: g++-7, cc: gcc-7, bt: Coverage, os: ubuntu-18.04}
- {std: 14, cxx: g++-7, cc: gcc-7, bt: Coverage, os: ubuntu-18.04}
- {std: 17, cxx: g++-7, cc: gcc-7, bt: Coverage, os: ubuntu-18.04}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}", CODECOV_TOKEN: "${{secrets.CODECOV_TOKEN}}", COVERALLS_REPO_TOKEN: "${{secrets.COVERALLS_REPO_TOKEN}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- name: shared64-submit
run: |
source .github/setenv.sh
c4_submit_coverage shared64 codecov
#c4_submit_coverage shared64 coveralls # only accepts one submission per job
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- name: static64-submit
run: |
source .github/setenv.sh
c4_submit_coverage static64 codecov
c4_submit_coverage static64 coveralls
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- name: static32-submit
run: |
source .github/setenv.sh
c4_submit_coverage static32 codecov
#c4_submit_coverage static32 coveralls # only accepts one submission per job
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- name: shared32-submit
run: |
source .github/setenv.sh
c4_submit_coverage shared32 codecov
#c4_submit_coverage shared32 coveralls # only accepts one submission per job
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- name: static32-submit
run: |
source .github/setenv.sh
c4_submit_coverage static32 codecov
#c4_submit_coverage static32 coveralls # only accepts one submission per job
#----------------------------------------------------------------------------
coverage_nofastfloat:
name: coverage/c++${{matrix.std}}/nofastfloat
# if: github.ref == 'refs/heads/master'
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 11, cxx: g++-7, cc: gcc-7, bt: Coverage, os: ubuntu-18.04}
- {std: 14, cxx: g++-7, cc: gcc-7, bt: Coverage, os: ubuntu-18.04}
- {std: 17, cxx: g++-7, cc: gcc-7, bt: Coverage, os: ubuntu-18.04}
env: {
STD: "${{matrix.std}}",
CXX_: "${{matrix.cxx}}",
BT: "${{matrix.bt}}",
OS: "${{matrix.os}}",
CODECOV_TOKEN: "${{secrets.CODECOV_TOKEN}}",
COVERALLS_REPO_TOKEN: "${{secrets.COVERALLS_REPO_TOKEN}}",
BDIR: "build/nofastfloat-${{matrix.cxx}}-cxx${{matrix.std}}",
IDIR: "install/nofastfloat-${{matrix.cxx}}-cxx${{matrix.std}}",
}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: nofastfloat-configure------------------------------------------------
run: |
set -x
mkdir -p $BDIR
mkdir -p $IDIR
cmake -S . -B $BDIR \
-DC4CORE_WITH_FASTFLOAT=OFF \
-DC4_CXX_STANDARD=${{matrix.std}} \
-DC4CORE_CXX_STANDARD=${{matrix.std}} \
-DC4CORE_BUILD_TESTS=ON \
-DC4CORE_VALGRIND=OFF \
-DC4CORE_COVERAGE_CODECOV=ON \
-DC4CORE_COVERAGE_CODECOV_SILENT=ON \
-DC4CORE_COVERAGE_COVERALLS=ON \
-DC4CORE_COVERAGE_COVERALLS_SILENT=ON \
-DCMAKE_INSTALL_PREFIX=$IDIR \
-DCMAKE_BUILD_TYPE=Coverage \
-DCMAKE_CXX_COMPILER=${{matrix.cxx}} \
-DCMAKE_C_COMPILER=${{matrix.cc}}
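# building with C4CORE_WITH_FASTFLOAT=OFF presumably exercises (and collects coverage for)
# the fallback float-parsing code paths that are skipped when fastfloat is enabled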
- name: nofastfloat-build
run: |
cmake --build $BDIR --config Coverage --target c4core-test-build -j
- name: nofastfloat-run
run: |
cmake --build $BDIR --config Coverage --target c4core-test-run
- name: nofastfloat-submit
run: |
cmake --build $BDIR --config Coverage --target c4core-coverage-submit-codecov
#cmake --build $BDIR --config Coverage --target c4core-coverage-submit-coveralls
#----------------------------------------------------------------------------
windows:
name: win/${{matrix.cxx}}/c++${{matrix.std}}/${{matrix.bt}}
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 11, cxx: vs2017, bt: Debug , os: windows-2016, bitlinks: shared64 static32}
- {std: 11, cxx: vs2017, bt: Release, os: windows-2016, bitlinks: shared64 static32}
- {std: 14, cxx: vs2017, bt: Debug , os: windows-2016, bitlinks: shared64 static32}
- {std: 14, cxx: vs2017, bt: Release, os: windows-2016, bitlinks: shared64 static32}
- {std: 11, cxx: vs2019, bt: Debug , os: windows-2019, bitlinks: shared64 static32}
- {std: 11, cxx: vs2019, bt: Release, os: windows-2019, bitlinks: shared64 static32}
- {std: 14, cxx: vs2019, bt: Debug , os: windows-2019, bitlinks: shared64 static32}
- {std: 14, cxx: vs2019, bt: Release, os: windows-2019, bitlinks: shared64 static32}
- {std: 17, cxx: vs2019, bt: Debug , os: windows-2019, bitlinks: shared64 static32}
- {std: 17, cxx: vs2019, bt: Release, os: windows-2019, bitlinks: shared64 static32}
- {std: 20, cxx: vs2019, bt: Debug , os: windows-2019, bitlinks: shared64 static32}
- {std: 20, cxx: vs2019, bt: Release, os: windows-2019, bitlinks: shared64 static32}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
#----------------------------------------------------------------------------
gcc_canary:
name: gcc_canary/${{matrix.cxx}}/c++${{matrix.std}}/${{matrix.bt}}
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 11, cxx: g++-7 , bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: g++-7 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 20, cxx: g++-10 , bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 20, cxx: g++-10 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: g++-5 , bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: g++-5 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: g++-4.8 , bt: Debug, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: g++-4.8 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
clang_canary:
name: clang_canary/${{matrix.cxx}}/c++${{matrix.std}}/${{matrix.bt}}
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 20, cxx: clang++-10 , bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 20, cxx: clang++-10 , bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: clang++-6.0, bt: Debug , os: ubuntu-18.04, bitlinks: shared64 static32}
- {std: 11, cxx: clang++-6.0, bt: Release, os: ubuntu-18.04, bitlinks: shared64 static32}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
clang_tidy:
name: clang_tidy/c++${{matrix.std}}/${{matrix.bt}}
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
# clang tidy takes a long time, so don't do multiple bits/linktypes
- {std: 11, cxx: clang++-9, bt: Debug , lint: clang-tidy, bitlinks: shared64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: Debug , lint: clang-tidy, bitlinks: shared32, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: Debug , lint: clang-tidy, bitlinks: static64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: Debug , lint: clang-tidy, bitlinks: static32, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: RelWithDebInfo, lint: clang-tidy, bitlinks: shared64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: RelWithDebInfo, lint: clang-tidy, bitlinks: shared32, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: RelWithDebInfo, lint: clang-tidy, bitlinks: static64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9, bt: RelWithDebInfo, lint: clang-tidy, bitlinks: static32, os: ubuntu-18.04}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
gcc_extended:
name: gcc_extended/${{matrix.cxx}}/c++${{matrix.std}}/${{matrix.bt}}/vg${{matrix.vg}}
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
# VALGRIND
- {std: 11, cxx: g++-10, bt: Debug , vg: ON, os: ubuntu-18.04}
- {std: 11, cxx: g++-10, bt: Release, vg: ON, os: ubuntu-18.04}
- {std: 14, cxx: g++-10, bt: Debug , vg: ON, os: ubuntu-18.04}
- {std: 14, cxx: g++-10, bt: Release, vg: ON, os: ubuntu-18.04}
- {std: 17, cxx: g++-10, bt: Debug , vg: ON, os: ubuntu-18.04}
- {std: 17, cxx: g++-10, bt: Release, vg: ON, os: ubuntu-18.04}
- {std: 20, cxx: g++-10, bt: Debug , vg: ON, os: ubuntu-18.04}
- {std: 20, cxx: g++-10, bt: Release, vg: ON, os: ubuntu-18.04}
#
- {std: 11, cxx: g++-9, bt: Debug , os: ubuntu-18.04}
- {std: 11, cxx: g++-9, bt: Release, os: ubuntu-18.04}
- {std: 11, cxx: g++-8, bt: Debug , os: ubuntu-18.04}
- {std: 11, cxx: g++-8, bt: Release, os: ubuntu-18.04}
- {std: 11, cxx: g++-7, bt: Debug , os: ubuntu-18.04}
- {std: 11, cxx: g++-7, bt: Release, os: ubuntu-18.04}
- {std: 11, cxx: g++-6, bt: Debug , os: ubuntu-18.04}
- {std: 11, cxx: g++-6, bt: Release, os: ubuntu-18.04}
- {std: 11, cxx: g++-5, bt: Debug , os: ubuntu-18.04}
- {std: 11, cxx: g++-5, bt: Release, os: ubuntu-18.04}
- {std: 11, cxx: g++-4.8, bt: Debug, os: ubuntu-18.04}
- {std: 11, cxx: g++-4.8, bt: Release, os: ubuntu-18.04}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
clang_extended:
name: clang_extended/${{matrix.cxx}}/c++${{matrix.std}}/${{matrix.bt}}/vg${{matrix.vg}}
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {std: 20, cxx: clang++-10 , bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 20, cxx: clang++-10 , bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9 , bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-9 , bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-8 , bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-8 , bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-7 , bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-7 , bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-6.0, bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-6.0, bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-5.0, bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-5.0, bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-4.0, bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-4.0, bt: Release, vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-3.9, bt: Debug , vg: on, os: ubuntu-18.04}
- {std: 11, cxx: clang++-3.9, bt: Release, vg: on, os: ubuntu-18.04}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
clang_sanitize:
name: clang_sanitize/c++${{matrix.std}}/${{matrix.bt}}/vg${{matrix.vg}}
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
# these jobs take much longer, so run only one bitlink pair per job to profit from parallelism
- {std: 11, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 11, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 11, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 14, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 14, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 14, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 14, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 17, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 17, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 17, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 17, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 20, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 20, cxx: clang++-10 , bt: Debug , vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
- {std: 20, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared64 static64, os: ubuntu-18.04}
- {std: 20, cxx: clang++-10 , bt: Release, vg: ON, san: ALL, bitlinks: shared32 static32, os: ubuntu-18.04}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: shared64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared64
- {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
- {name: shared64-run, run: source .github/setenv.sh && c4_run_test shared64}
- {name: shared64-pack, run: source .github/setenv.sh && c4_package shared64}
- name: static64-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static64
- {name: static64-build, run: source .github/setenv.sh && c4_build_test static64}
- {name: static64-run, run: source .github/setenv.sh && c4_run_test static64}
- {name: static64-pack, run: source .github/setenv.sh && c4_package static64}
- name: static32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test static32
- {name: static32-build, run: source .github/setenv.sh && c4_build_test static32}
- {name: static32-run, run: source .github/setenv.sh && c4_run_test static32}
- {name: static32-pack, run: source .github/setenv.sh && c4_package static32}
- name: shared32-configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test shared32
- {name: shared32-build, run: source .github/setenv.sh && c4_build_test shared32}
- {name: shared32-run, run: source .github/setenv.sh && c4_run_test shared32}
- {name: shared32-pack, run: source .github/setenv.sh && c4_package shared32}
#----------------------------------------------------------------------------
arm:
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
# these jobs take much longer, so run only one bitlink pair per job to profit from parallelism
- {std: 11, bt: Debug , toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
- {std: 11, bt: Release, toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
- {std: 14, bt: Debug , toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
- {std: 14, bt: Release, toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
- {std: 17, bt: Debug , toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
- {std: 17, bt: Release, toolchain: cmake/Toolchain-Arm-ubuntu.cmake, cxx: arm-linux-gnueabihf-gcc, os: ubuntu-18.04}
env: {TOOLCHAIN: "${{matrix.toolchain}}", STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: configure---------------------------------------------------
run: source .github/setenv.sh && c4_cfg_test arm
- {name: build, run: source .github/setenv.sh && c4_build_test arm}
- {name: run, run: source .github/setenv.sh && c4_run_test arm}
- {name: pack, run: source .github/setenv.sh && c4_package arm}
#----------------------------------------------------------------------------
# # https://blog.kitware.com/static-checks-with-cmake-cdash-iwyu-clang-tidy-lwyu-cpplint-and-cppcheck/
# static_analysis:
# continue-on-error: true
# if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
# runs-on: ${{matrix.os}}
# strategy:
# fail-fast: false
# matrix:
# include:
# # these jobs take much longer, so run only one bitlink pair per job to profit from parallelism
# - {std: 11, cxx: clang++-10, bt: Debug , bitlinks: shared64, os: ubuntu-18.04}
# - {std: 11, cxx: clang++-10, bt: Release, bitlinks: shared64, os: ubuntu-18.04}
# - {std: 14, cxx: clang++-10, bt: Debug , bitlinks: shared64, os: ubuntu-18.04}
# - {std: 14, cxx: clang++-10, bt: Release, bitlinks: shared64, os: ubuntu-18.04}
# - {std: 17, cxx: clang++-10, bt: Debug , bitlinks: shared64, os: ubuntu-18.04}
# - {std: 17, cxx: clang++-10, bt: Release, bitlinks: shared64, os: ubuntu-18.04}
# - {std: 20, cxx: clang++-10, bt: Debug , bitlinks: shared64, os: ubuntu-18.04}
# - {std: 20, cxx: clang++-10, bt: Release, bitlinks: shared64, os: ubuntu-18.04}
# env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}"}
# steps:
# - {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
# - {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
# - {name: show info, run: source .github/setenv.sh && c4_show_info}
# - name: shared64-configure---------------------------------------------------
# run: source .github/setenv.sh && c4_cfg_test shared64
# - {name: shared64-build, run: source .github/setenv.sh && c4_build_test shared64}
# - {name: clang-tidy, run: cmake "-DCMAKE_CXX_CLANG_TIDY=/usr/bin/clang-tidy-3.9;-checks=*" ../path/to/source}
# - {name: cppcheck, run: cmake "-DCMAKE_CXX_CPPCHECK=/usr/bin/cppcheck;--std=c++11" ../path/to/source}
# - {name: cpplint, run: cmake "-DCMAKE_CXX_CPPLINT=/usr/local/bin/cpplint;--linelength=179" ..}
# - {name: include-what-you-use, run: cmake "-DCMAKE_CXX_INCLUDE_WHAT_YOU_USE=/usr/bin/iwyu;--transitive_includes_only" ..}
# - {name: link-what-you-use, run: cmake -DCMAKE_LINK_WHAT_YOU_USE=TRUE ..}


@@ -0,0 +1,104 @@
name: test_install
defaults:
#if: "!contains(github.event.head_commit.message, 'skip ci')" # SKIP
run:
# Use a bash shell so we can use the same syntax for environment variable
# access regardless of the host operating system
shell: bash -e -x {0}
on:
# https://github.community/t/how-to-trigger-an-action-on-push-or-pull-request-but-not-both/16662
workflow_dispatch:
push:
branches:
- master
pull_request:
branches:
- master
env:
PROJ_PFX_TARGET: c4core-
PROJ_PFX_CMAKE: C4CORE_
CMAKE_FLAGS:
NUM_JOBS_BUILD: # 4
jobs:
#----------------------------------------------------------------------------
install_tests:
name: ${{matrix.name}}/${{matrix.bt}}
# if: github.ref == 'refs/heads/master'
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {name: find_package/linux , sdir: test/test_install , os: ubuntu-18.04, cxx: g++-10 , gen: "-DCMAKE_CXX_COMPILER=g++-10" , tgt: all , bt: Release, vars: "-Dc4core_DIR=$GITHUB_WORKSPACE/$PDIR/lib/cmake/c4core -DC4CORE_TEST_INSTALL_PACKAGE_MODE=ON", commonvars: }
- {name: find_package/linux , sdir: test/test_install , os: ubuntu-18.04, cxx: g++-10 , gen: "-DCMAKE_CXX_COMPILER=g++-10" , tgt: all , bt: Debug , vars: "-Dc4core_DIR=$GITHUB_WORKSPACE/$PDIR/lib/cmake/c4core -DC4CORE_TEST_INSTALL_PACKAGE_MODE=ON", commonvars: }
- {name: find_package/linux/libcxx, sdir: test/test_install , os: ubuntu-18.04, cxx: clang++-9, gen: "-DCMAKE_CXX_COMPILER=clang++-9" , tgt: all , bt: Release, vars: "-Dc4core_DIR=$GITHUB_WORKSPACE/$PDIR/lib/cmake/c4core -DC4CORE_TEST_INSTALL_PACKAGE_MODE=ON", commonvars: "-DC4CORE_USE_LIBCXX=ON"}
- {name: find_package/linux/libcxx, sdir: test/test_install , os: ubuntu-18.04, cxx: clang++-9, gen: "-DCMAKE_CXX_COMPILER=clang++-9" , tgt: all , bt: Debug , vars: "-Dc4core_DIR=$GITHUB_WORKSPACE/$PDIR/lib/cmake/c4core -DC4CORE_TEST_INSTALL_PACKAGE_MODE=ON", commonvars: "-DC4CORE_USE_LIBCXX=ON"}
- {name: find_package/macos , sdir: test/test_install , os: macos-11.0 , cxx: xcode , gen: "-G Xcode -DCMAKE_OSX_ARCHITECTURES=x86_64", tgt: ALL_BUILD, bt: Release, vars: "-Dc4core_DIR=$GITHUB_WORKSPACE/$PDIR/lib/cmake/c4core -DC4CORE_TEST_INSTALL_PACKAGE_MODE=ON", commonvars: }
- {name: find_package/macos , sdir: test/test_install , os: macos-11.0 , cxx: xcode , gen: "-G Xcode -DCMAKE_OSX_ARCHITECTURES=x86_64", tgt: ALL_BUILD, bt: Debug , vars: "-Dc4core_DIR=$GITHUB_WORKSPACE/$PDIR/lib/cmake/c4core -DC4CORE_TEST_INSTALL_PACKAGE_MODE=ON", commonvars: }
- {name: find_package/win , sdir: test/test_install , os: windows-2019, cxx: vs2019 , gen: "-G 'Visual Studio 16 2019' -A x64" , tgt: ALL_BUILD, bt: Release, vars: "-Dc4core_DIR=$GITHUB_WORKSPACE/$PDIR/cmake -DC4CORE_TEST_INSTALL_PACKAGE_MODE=ON", commonvars: }
- {name: find_package/win , sdir: test/test_install , os: windows-2019, cxx: vs2019 , gen: "-G 'Visual Studio 16 2019' -A x64" , tgt: ALL_BUILD, bt: Debug , vars: "-Dc4core_DIR=$GITHUB_WORKSPACE/$PDIR/cmake -DC4CORE_TEST_INSTALL_PACKAGE_MODE=ON", commonvars: }
#
- {name: find_library/linux , sdir: test/test_install , os: ubuntu-18.04, cxx: g++-10 , gen: "-DCMAKE_CXX_COMPILER=g++-10" , tgt: all , bt: Release, vars: "-DCMAKE_PREFIX_PATH=$GITHUB_WORKSPACE/$PDIR -DC4CORE_TEST_INSTALL_PACKAGE_MODE=OFF", commonvars: }
- {name: find_library/linux , sdir: test/test_install , os: ubuntu-18.04, cxx: g++-10 , gen: "-DCMAKE_CXX_COMPILER=g++-10" , tgt: all , bt: Debug , vars: "-DCMAKE_PREFIX_PATH=$GITHUB_WORKSPACE/$PDIR -DC4CORE_TEST_INSTALL_PACKAGE_MODE=OFF", commonvars: }
- {name: find_library/linux/libcxx, sdir: test/test_install , os: ubuntu-18.04, cxx: clang++-9, gen: "-DCMAKE_CXX_COMPILER=clang++-9" , tgt: all , bt: Release, vars: "-DCMAKE_PREFIX_PATH=$GITHUB_WORKSPACE/$PDIR -DC4CORE_TEST_INSTALL_PACKAGE_MODE=OFF", commonvars: "-DC4CORE_USE_LIBCXX=ON"}
- {name: find_library/linux/libcxx, sdir: test/test_install , os: ubuntu-18.04, cxx: clang++-9, gen: "-DCMAKE_CXX_COMPILER=clang++-9" , tgt: all , bt: Debug , vars: "-DCMAKE_PREFIX_PATH=$GITHUB_WORKSPACE/$PDIR -DC4CORE_TEST_INSTALL_PACKAGE_MODE=OFF", commonvars: "-DC4CORE_USE_LIBCXX=ON"}
- {name: find_library/macos , sdir: test/test_install , os: macos-11.0 , cxx: xcode , gen: "-G Xcode -DCMAKE_OSX_ARCHITECTURES=x86_64", tgt: ALL_BUILD, bt: Release, vars: "-DCMAKE_PREFIX_PATH=$GITHUB_WORKSPACE/$PDIR -DC4CORE_TEST_INSTALL_PACKAGE_MODE=OFF", commonvars: }
- {name: find_library/macos , sdir: test/test_install , os: macos-11.0 , cxx: xcode , gen: "-G Xcode -DCMAKE_OSX_ARCHITECTURES=x86_64", tgt: ALL_BUILD, bt: Debug , vars: "-DCMAKE_PREFIX_PATH=$GITHUB_WORKSPACE/$PDIR -DC4CORE_TEST_INSTALL_PACKAGE_MODE=OFF", commonvars: }
- {name: find_library/win , sdir: test/test_install , os: windows-2019, cxx: vs2019 , gen: "-G 'Visual Studio 16 2019' -A x64" , tgt: ALL_BUILD, bt: Release, vars: "-DCMAKE_PREFIX_PATH=$GITHUB_WORKSPACE/$PDIR -DC4CORE_TEST_INSTALL_PACKAGE_MODE=OFF", commonvars: }
- {name: find_library/win , sdir: test/test_install , os: windows-2019, cxx: vs2019 , gen: "-G 'Visual Studio 16 2019' -A x64" , tgt: ALL_BUILD, bt: Debug , vars: "-DCMAKE_PREFIX_PATH=$GITHUB_WORKSPACE/$PDIR -DC4CORE_TEST_INSTALL_PACKAGE_MODE=OFF", commonvars: }
#
- {name: singleheader/linux , sdir: test/test_singleheader, os: ubuntu-18.04, cxx: g++-10 , gen: "-DCMAKE_CXX_COMPILER=g++-10" , tgt: all , bt: Release, vars: , commonvars: }
- {name: singleheader/linux , sdir: test/test_singleheader, os: ubuntu-18.04, cxx: g++-10 , gen: "-DCMAKE_CXX_COMPILER=g++-10" , tgt: all , bt: Debug , vars: , commonvars: }
- {name: singleheader/linux/libcxx, sdir: test/test_singleheader, os: ubuntu-18.04, cxx: clang++-9, gen: "-DCMAKE_CXX_COMPILER=clang++-9" , tgt: all , bt: Release, vars: , commonvars: "-DC4CORE_USE_LIBCXX=ON"}
- {name: singleheader/linux/libcxx, sdir: test/test_singleheader, os: ubuntu-18.04, cxx: clang++-9, gen: "-DCMAKE_CXX_COMPILER=clang++-9" , tgt: all , bt: Debug , vars: , commonvars: "-DC4CORE_USE_LIBCXX=ON"}
- {name: singleheader/macos , sdir: test/test_singleheader, os: macos-11.0 , cxx: xcode , gen: "-G Xcode -DCMAKE_OSX_ARCHITECTURES=x86_64", tgt: ALL_BUILD, bt: Release, vars: , commonvars: }
- {name: singleheader/macos , sdir: test/test_singleheader, os: macos-11.0 , cxx: xcode , gen: "-G Xcode -DCMAKE_OSX_ARCHITECTURES=x86_64", tgt: ALL_BUILD, bt: Debug , vars: , commonvars: }
- {name: singleheader/win , sdir: test/test_singleheader, os: windows-2019, cxx: vs2019 , gen: "-G 'Visual Studio 16 2019' -A x64" , tgt: ALL_BUILD, bt: Release, vars: , commonvars: }
- {name: singleheader/win , sdir: test/test_singleheader, os: windows-2019, cxx: vs2019 , gen: "-G 'Visual Studio 16 2019' -A x64" , tgt: ALL_BUILD, bt: Debug , vars: , commonvars: }
env:
CXX_: "${{matrix.cxx}}"
BT: "${{matrix.bt}}"
OS: "${{matrix.os}}"
BDIR: "build/${{matrix.name}}-${{matrix.bt}}"
IDIR: "install/${{matrix.name}}-${{matrix.bt}}"
PDIR: "prefix/${{matrix.name}}-${{matrix.bt}}"
steps:
- {name: checkout, uses: actions/checkout@v2, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
- {name: show info, run: source .github/setenv.sh && c4_show_info}
- name: Install python 3.9
uses: actions/setup-python@v2
with: { python-version: 3.9 }
- name: preinstall
run: |
if [ "${{matrix.sdir}}" == "test/test_install" ] ; then
mkdir -p $BDIR-staging
cmake -S . -B $BDIR-staging -DCMAKE_INSTALL_PREFIX=$PDIR -DCMAKE_BUILD_TYPE=${{matrix.bt}} ${{matrix.gen}} ${{matrix.commonvars}}
cmake --build $BDIR-staging --config ${{matrix.bt}} --target ${{matrix.tgt}} -j
cmake --build $BDIR-staging --config ${{matrix.bt}} --target install
fi
- name: configure
run: |
mkdir -p $BDIR
mkdir -p $IDIR
cmake -S ${{matrix.sdir}} -B $BDIR \
-DC4CORE_BUILD_TESTS=ON \
-DC4CORE_VALGRIND=OFF \
-DCMAKE_BUILD_TYPE=${{matrix.bt}} \
-DCMAKE_INSTALL_PREFIX=$IDIR \
${{matrix.gen}} \
${{matrix.vars}} \
${{matrix.commonvars}}
- name: build
run: |
cmake --build $BDIR --config ${{matrix.bt}} --target c4core-test-build -j
- name: run
run: |
cmake --build $BDIR --config ${{matrix.bt}} --target c4core-test-run


@@ -0,0 +1,34 @@
# text editor files
*.bck
\#*
*~
.ccls-cache/
.clangd/
.cache/
.cquery_cached_index/
__pycache__/
# Visual Studio files
.vs/
.vscode/
# QtCreator files
CMakeLists.txt.user
# Eclipse
.project
.cproject
/.settings/
# build files
build/
install/
.python-version
compile_commands.json
# test files
/Testing/
# continuous integration files
.github/vagrant/*.log
.github/vagrant/.vagrant
.github/vagrant/macos/.vagrant
src_singleheader/


@@ -0,0 +1,9 @@
[submodule "cmake"]
path = cmake
url = https://github.com/biojppm/cmake
[submodule "extern/debugbreak"]
path = src/c4/ext/debugbreak
url = https://github.com/biojppm/debugbreak
[submodule "src/c4/ext/fast_float"]
path = src/c4/ext/fast_float
url = https://github.com/fastfloat/fast_float


@@ -0,0 +1,107 @@
cmake_minimum_required(VERSION 3.13 FATAL_ERROR)
include(./cmake/c4Project.cmake)
project(c4core
DESCRIPTION "Multiplatform low-level C++ utilities"
HOMEPAGE_URL "https://github.com/biojppm/c4core"
LANGUAGES CXX)
include(./compat.cmake)
c4_project(VERSION 0.1.8
AUTHOR "Joao Paulo Magalhaes <dev@jpmag.me>")
option(C4CORE_WITH_FASTFLOAT "use fastfloat to parse floats" ON)
set(C4CORE_SRC_FILES
c4/allocator.hpp
c4/base64.hpp
c4/base64.cpp
c4/blob.hpp
c4/bitmask.hpp
c4/charconv.hpp
c4/c4_pop.hpp
c4/c4_push.hpp
c4/char_traits.cpp
c4/char_traits.hpp
c4/common.hpp
c4/compiler.hpp
c4/config.hpp
c4/cpu.hpp
c4/ctor_dtor.hpp
c4/dump.hpp
c4/enum.hpp
c4/error.cpp
c4/error.hpp
c4/export.hpp
c4/format.hpp
c4/format.cpp
c4/hash.hpp
c4/language.hpp
c4/language.cpp
c4/memory_resource.cpp
c4/memory_resource.hpp
c4/memory_util.cpp
c4/memory_util.hpp
c4/platform.hpp
c4/preprocessor.hpp
c4/restrict.hpp
c4/span.hpp
c4/std/std.hpp
c4/std/std_fwd.hpp
c4/std/string.hpp
c4/std/string_fwd.hpp
c4/std/tuple.hpp
c4/std/vector.hpp
c4/std/vector_fwd.hpp
c4/substr.hpp
c4/substr_fwd.hpp
c4/szconv.hpp
c4/type_name.hpp
c4/types.hpp
c4/unrestrict.hpp
c4/utf.hpp
c4/utf.cpp
c4/windows.hpp
c4/windows_pop.hpp
c4/windows_push.hpp
c4/c4core.natvis
#
c4/ext/debugbreak/debugbreak.h
c4/ext/rng/rng.hpp
c4/ext/sg14/inplace_function.h
)
if(C4CORE_WITH_FASTFLOAT)
list(APPEND C4CORE_SRC_FILES
c4/ext/fast_float.hpp
c4/ext/fast_float_all.h
)
endif()
set(C4CORE_AMALGAMATED ${C4CORE_SRC_DIR}/../src_singleheader/c4/c4core_all.hpp)
list(TRANSFORM C4CORE_SRC_FILES PREPEND "${C4CORE_SRC_DIR}/" OUTPUT_VARIABLE C4CORE_SRC_FILES_FULL)
add_custom_target(c4core-amalgamate
python ${CMAKE_CURRENT_LIST_DIR}/tools/amalgamate.py ${C4CORE_AMALGAMATED}
COMMENT "${CMAKE_CURRENT_LIST_DIR}/tools/amalgamate.py ${C4CORE_AMALGAMATED}"
BYPRODUCTS ${C4CORE_AMALGAMATED}
DEPENDS ${C4CORE_SRC_FILES_FULL}
)
c4_add_library(c4core
INC_DIRS
$<BUILD_INTERFACE:${C4CORE_SRC_DIR}> $<INSTALL_INTERFACE:include>
SOURCE_ROOT ${C4CORE_SRC_DIR}
SOURCES ${C4CORE_SRC_FILES}
)
if(NOT C4CORE_WITH_FASTFLOAT)
target_compile_definitions(c4core PUBLIC -DC4CORE_NO_FAST_FLOAT)
endif()
#-------------------------------------------------------
c4_install_target(c4core)
c4_install_exports()
c4_add_dev_targets()
c4_pack_project(TYPE LIBRARY)


@@ -0,0 +1,26 @@
src/c4/ext/sg14/inplace_function.h is distributed under the following terms:
----------------------------------------------------------------------------
Boost Software License - Version 1.0 - August 17th, 2003
Permission is hereby granted, free of charge, to any person or organization
obtaining a copy of the software and accompanying documentation covered by
this license (the "Software") to use, reproduce, display, distribute,
execute, and transmit the Software, and to prepare derivative works of the
Software, and to permit third-parties to whom the Software is furnished to
do so, all subject to the following:
The copyright notices in the Software and this entire statement, including
the above license grant, this restriction and the following disclaimer,
must be included in all copies of the Software, in whole or in part, and
all derivative works of the Software, unless such copies or derivative
works are solely in the form of machine-executable object code generated by
a source language processor.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.


@@ -0,0 +1,20 @@
Copyright (c) 2018, Joao Paulo Magalhaes <dev@jpmag.me>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

3rdparty/rapidyaml/ext/c4core/README.md

@@ -0,0 +1,232 @@
# c4core - C++ core utilities
[![MIT Licensed](https://img.shields.io/badge/License-MIT-green.svg)](https://github.com/biojppm/c4core/blob/master/LICENSE.txt)
[![Docs](https://img.shields.io/badge/docs-docsforge-blue)](https://c4core.docsforge.com/)
[![ci](https://github.com/biojppm/c4core/workflows/ci/badge.svg)](https://github.com/biojppm/c4core/actions?query=ci)
[![Coveralls](https://coveralls.io/repos/github/biojppm/c4core/badge.svg)](https://coveralls.io/github/biojppm/c4core)
[![Codecov](https://codecov.io/gh/biojppm/c4core/branch/master/graph/badge.svg)](https://codecov.io/gh/biojppm/c4core)
[![LGTM alerts](https://img.shields.io/lgtm/alerts/g/biojppm/c4core.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/biojppm/c4core/alerts/)
[![LGTM grade: C/C++](https://img.shields.io/lgtm/grade/cpp/g/biojppm/c4core.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/biojppm/c4core/context:cpp)
c4core is a library of low-level C++ utilities, written with low-latency
projects in mind.
Some of the utilities provided by c4core already have equivalent
functionality in the C++ standard, but they are provided here because the
existing C++ equivalent may be insufficient (eg std::string_view),
inefficient (eg std::string), heavy (eg streams), or plainly unusable on
some platforms/projects (eg exceptions); some other utilities have
equivalents under consideration for C++ standardisation; and still others
have (to my knowledge) no equivalent under consideration. Be that as it may,
I've been using these utilities in this or similar forms for some years now,
and I've found them incredibly useful in my projects. I'm packaging them as
a separate library because all of my projects use them.
c4core is [extensively unit-tested on Linux, Windows and
macOS](https://github.com/biojppm/c4core/actions). The tests cover
x64, x86, arm, wasm (emscripten), aarch64, ppc64le and s390x
architectures, and include analysing c4core with:
* valgrind
* clang-tidy
* clang sanitizers:
* memory
* address
* undefined behavior
* thread
* [LGTM.com](https://lgtm.com/projects/g/biojppm/c4core)
c4core also works [in
bare-metal](https://github.com/biojppm/c4core/issues/63) and
[on RISC-V](https://github.com/biojppm/c4core/pull/69), but at the
moment it is not easy to add automated tests for these to the CI, so
for now they are not in the list of official architectures.
## Obtaining c4core
c4core uses git submodules. It is best to clone c4core with the `--recursive`
option:
```bash
# using --recursive makes sure git submodules are also cloned at the same time
git clone --recursive https://github.com/biojppm/c4core
```
If you omit the `--recursive` option, then after cloning you will have to
check out the current version of the submodules, using `git submodule
init` followed by `git submodule update`.
## Using c4core in your project
c4core is built with cmake, and assumes you also use cmake. Although c4core
is NOT header-only, and currently has no install target, you can very easily
use c4core in your project by calling
`add_subdirectory(${path_to_c4core_root})` in your CMakeLists.txt; this will
add c4core as a subproject of your project. Doing this is not intrusive to
your cmake project, because c4core is fast to build (typically under 10s)
and it prefixes every cmake variable with `C4CORE_`. But more importantly,
this will enable you to compile c4core with the exact same compile settings
used by your project.
Here's a very quick complete example of setting up your project to use
c4core:
```cmake
project(foo)
add_subdirectory(c4core)
add_library(foo foo.cpp)
target_link_libraries(foo PUBLIC c4core) # that's it!
```
Note above that the call to `target_link_libraries()` is using PUBLIC
linking. This is required to make sure the include directories from `c4core`
are transitively used.
## Quick tour
All of the utilities in this library are under the namespace `c4`; any
exposed macros use the prefix `C4_`: eg `C4_ASSERT()`.
### Multi-platform / multi-compiler utilities
```c++
// TODO: elaborate on the topics:
#include <c4/error.hpp>
C4_LIKELY()/C4_UNLIKELY()
C4_RESTRICT, $, c$, $$, c$$
#include <c4/restrict.hpp>
#include <c4/unrestrict.hpp>
#include <c4/windows_push.hpp>
#include <c4/windows_pop.hpp>
C4_UNREACHABLE()
c4::type_name()
```
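A minimal sketch of typical use while the topics above await elaboration; the header locations named for `C4_LIKELY()`/`C4_UNLIKELY()` and `C4_UNREACHABLE()` are assumptions, and `step()`/`process()` are purely illustrative functions:
```c++
#include <c4/error.hpp>    // C4_UNREACHABLE() (assumed location)
#include <c4/language.hpp> // C4_LIKELY()/C4_UNLIKELY() (assumed location)

// illustrative: the caller guarantees direction is 'u' or 'd'
int step(char direction)
{
    switch(direction)
    {
    case 'u': return +1;
    case 'd': return -1;
    default: C4_UNREACHABLE(); // mark this branch as impossible
    }
    return 0; // never reached; only silences missing-return warnings
}

// illustrative: branch hint for a rarely taken error path
bool process(const char *s)
{
    if(C4_UNLIKELY(s == nullptr))
        return false;
    // ... normal path ...
    return true;
}
```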
### Runtime assertions and error handling
```c++
// TODO: elaborate on the topics:
error callback
C4_ASSERT()
C4_XASSERT()
C4_CHECK()
C4_ERROR()
C4_NOT_IMPLEMENTED()
```
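As a rough illustration (the build-type semantics noted in the comments are assumptions to confirm in `c4/error.hpp`; the printf-style message of `C4_ASSERT_MSG()` follows the ROADMAP example in this repository):
```c++
#include <c4/error.hpp>
#include <cstring>

// illustrative function, not part of c4core
void copy_bytes(char *dst, std::size_t cap, const char *src, std::size_t len)
{
    C4_CHECK(dst != nullptr);  // assumed: checked in every build type
    C4_ASSERT(src != nullptr); // assumed: debug-only assertion
    C4_ASSERT_MSG(cap >= len, "cap=%zu len=%zu", cap, len);
    std::memcpy(dst, src, len);
}
```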
### Memory allocation
```c++
// TODO: elaborate on the topics:
c4::aalloc(), c4::afree() // aligned allocation
c4::MemoryResource // global and scope
c4::Allocator
```
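A small sketch of the aligned allocation pair; the `(size, alignment)` parameter order and the header location are assumptions:
```c++
#include <c4/memory_resource.hpp> // assumed location of aalloc()/afree()

void example_aligned_alloc()
{
    void *mem = c4::aalloc(1024, 64); // assumed signature: aalloc(num_bytes, alignment)
    // ... use the 64-byte-aligned buffer ...
    c4::afree(mem);
}
```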
### Mass initialization/construction/destruction
```c++
// TODO: elaborate on the topics:
c4::construct()/c4::construct_n()
c4::destroy()/c4::destroy_n()
c4::copy_construct()/c4::copy_construct_n()
c4::copy_assign()/c4::copy_assign_n()
c4::move_construct()/c4::move_construct_n()
c4::move_assign()/c4::move_assign_n()
c4::make_room()/c4::destroy_room()
```
### Writeable string views: c4::substr and c4::csubstr
Here: [`#include <c4/substr.hpp>`](src/c4/substr.hpp)
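Both types are non-owning views over existing character buffers: `c4::substr` is writeable, `c4::csubstr` is read-only. A minimal sketch, using only the `.str`/`.len` members and the `split()` range:
```c++
#include <c4/substr.hpp>
#include <cstdio>

int main()
{
    // a csubstr is just a pointer (.str) and a length (.len); it owns nothing
    c4::csubstr path = "usr/local/include";
    for(c4::csubstr part : path.split('/')) // "usr", "local", "include"
    {
        std::fwrite(part.str, 1, part.len, stdout);
        std::putchar('\n');
    }
    return 0;
}
```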
### Value <-> character interoperation
Here: [`#include <c4/charconv.hpp>`](src/c4/charconv.hpp)
```c++
// TODO: elaborate on the topics:
c4::utoa(), c4::atou()
c4::itoa(), c4::atoi()
c4::ftoa(), c4::atof()
c4::dtoa(), c4::atod()
c4::to_chars(), c4::from_chars()
c4::to_chars_sub()
c4::to_chars_first()
```
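A short sketch of round-tripping a value through a caller-provided buffer; the conventions that `to_chars()` returns the required size and `from_chars()` returns a success flag are assumptions to confirm in the header:
```c++
#include <c4/charconv.hpp>
#include <c4/substr.hpp>
#include <cstddef>

void example_charconv()
{
    char storage[32];
    c4::substr buf(storage, sizeof(storage));

    // write: assumed to return the number of characters needed
    size_t len = c4::to_chars(buf, 1234);
    c4::csubstr written = buf.first(len);

    // read back: assumed to return true on success
    int value = 0;
    bool ok = c4::from_chars(written, &value);
    (void)ok;
}
```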
### String formatting and parsing
* [`#include <c4/format.hpp>`](src/c4/format.hpp)
```c++
// TODO: elaborate on the topics:
c4::cat(), c4::uncat()
c4::catsep(), c4::uncatsep()
c4::format(), c4::unformat()
// formatting:
c4::raw, c4::craw
```
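A hedged sketch of the buffer-based calls; the `{}` placeholders follow the ROADMAP example in this repository, and the return-the-needed-size convention is an assumption:
```c++
#include <c4/format.hpp>
#include <c4/substr.hpp>
#include <cstddef>

void example_format()
{
    char storage[64];
    c4::substr buf(storage, sizeof(storage));

    // concatenate arguments back to back; the return value is assumed to be
    // the number of characters needed (it may exceed buf.len)
    size_t sz = c4::cat(buf, "x=", 1, " y=", 2.5);
    c4::csubstr result = buf.first(sz);

    // positional formatting with {} placeholders
    sz = c4::format(buf, "point=({},{})", 10, 20);
    result = buf.first(sz);
    (void)result;
}
```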
### `c4::span` and `c4::blob`
* [`#include <c4/span.hpp>`](src/c4/span.hpp)
* [`#include <c4/blob.hpp>`](src/c4/blob.hpp)
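Both are non-owning views: `c4::span` over typed elements, `c4::blob` over raw bytes. A minimal sketch of `c4::span`, assuming a pointer+length constructor:
```c++
#include <c4/span.hpp>

// non-owning view: the caller keeps ownership of the elements
int sum(c4::span<int> values)
{
    int total = 0;
    for(int v : values)
        total += v;
    return total;
}

void example_span()
{
    int storage[4] = {1, 2, 3, 4};
    c4::span<int> sp(storage, 4); // assumed pointer+length constructor
    int total = sum(sp);
    (void)total;
}
```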
### Enums and enum symbols
[`#include <c4/enum.hpp>`](src/c4/enum.hpp)
```c++
// TODO: elaborate on the topics:
c4::e2str(), c4::str2e()
```
### Bitmasks and bitmask symbols
[`#include <c4/bitmask.hpp>`](src/c4/bitmask.hpp)
```c++
// TODO: elaborate on the topics:
c4::bm2str(), c4::str2bm()
```
### Base64 encoding / decoding
[`#include <c4/base64.hpp>`](src/c4/base64.hpp)
### Fuzzy float comparison


@@ -0,0 +1,23 @@
# ROADMAP
## New features
These changes will provide new features, and client code can be kept
unchanged.
## API changes
These changes will require client code to be updated.
* [breaking] drop use of C-style sprintf() formats in error messages and
assertions. Change the implementation to use c4::format()
```c++
C4_ASSERT_MSG(sz > s.size(), "sz=%zu s.size()=%zu", sz, s.size());
// ... the above changes to:
C4_ASSERT_MSG(sz > s.size(), "sz={} s.size()={}", sz, s.size());
```
## Implementation changes
* drop calls to sprintf() in charconv.hpp.


@@ -0,0 +1,3 @@
# 0.1.0
First release.


@@ -0,0 +1,5 @@
# 0.1.1
- Fix parsing of hexadecimal floats ([2d5c3f0](https://github.com/biojppm/c4core/commits/2d5c3f0))
- Fix `csubstr::reverse_sub()` ([902c5b9](https://github.com/biojppm/c4core/commits/902c5b9))
- Fix [#35](https://github.com/biojppm/c4core/issues/35): add SO_VERSION


@@ -0,0 +1,4 @@
- Fix error macros (ie `C4_ERROR()`, `C4_CHECK()`, `C4_ASSERT()`, etc) such that they are a single statement
- `is_debugger_attached()`: add MacOSX version
- Add support for Visual Studio 2022
- Ensure `C4_LITTLE_ENDIAN` is always defined, even with mixed endianness


@@ -0,0 +1 @@
- Update fast_float to [3.2.1](https://github.com/fastfloat/fast_float/releases/tag/v3.2.0)


@@ -0,0 +1,6 @@
- [PR #38](https://github.com/biojppm/c4core/pull/38): add s390x architecture feature macros.
- Fix compiler warnings after update of fast_float to [3.2.1](https://github.com/fastfloat/fast_float/releases/tag/v3.2.0).
### Thanks
- @musicinmybrain


@@ -0,0 +1,2 @@
- Add support for aarch64, s390x, ppc64le CPU architectures
- Update debugbreak header (added support for the above architectures)


@@ -0,0 +1,2 @@
- Fix wrong version names in version 0.1.5 (was saying 0.1.4, should be 0.1.5)


@@ -0,0 +1,5 @@
- Fix build with C4CORE_NO_FAST_FLOAT ([#42](https://github.com/biojppm/c4core/pull/42)).
- Fix clang warning in AIX/xlclang ([#44](https://github.com/biojppm/c4core/pull/44)).
### Thanks
- @mbs-c


@@ -0,0 +1,45 @@
### New features
- Add amalgamation into a single header file ([PR #48](https://github.com/biojppm/c4core/pull/48)):
- The amalgamated header will be available together with the deliverables from each release.
- To generate the amalgamated header:
```
$ python tools/amalgamate.py c4core_all.hpp
```
- To use the amalgamated header:
- Include at will in any header of your project.
- In one - and only one - of your project source files, `#define C4CORE_SINGLE_HDR_DEFINE_NOW` and then `#include <c4core_all.hpp>`. This will enable the function and class definitions in the header file. For example, here's a sample program:
```c++
#include <iostream>
#define C4CORE_SINGLE_HDR_DEFINE_NOW // do this before the include
#include <c4core_all.hpp>
int main()
{
for(c4::csubstr s : c4::csubstr("a/b/c/d").split('/'))
std::cout << s << "\n";
}
```
- Add `csubstr::is_unsigned_integer()` and `csubstr::is_real()` ([PR #49](https://github.com/biojppm/c4core/pull/49)).
- CMake: add alias target c4core::c4core, guaranteeing that the same code can be used with `add_subdirectory()` and `find_package()`. (see [rapidyaml #173](https://github.com/biojppm/rapidyaml/issues/173))
- Add support for compilation with emscripten (WebAssembly+javascript) ([PR #52](https://github.com/biojppm/c4core/pull/52)).
### Fixes
- Fix edge cases with empty strings in `span::first()`, `span::last()` and `span::range()` ([PR #49](https://github.com/biojppm/c4core/pull/49)).
- Accept octal numbers in `substr::first_real_span()` and `substr::is_real()` ([PR #49](https://github.com/biojppm/c4core/pull/49)).
- `substr`: fix coverage misses in number query methods ([PR #49](https://github.com/biojppm/c4core/pull/49)).
- Use single-header version of fast_float ([PR #49](https://github.com/biojppm/c4core/pull/47)).
- Suppress warnings triggered from fast_float in clang (`-Wfortify-source`) ([PR #49](https://github.com/biojppm/c4core/pull/47)).
- Add missing `inline` in [src/c4/ext/rng/rng.hpp](src/c4/ext/rng/rng.hpp) ([PR #49](https://github.com/biojppm/c4core/pull/47)).
- Fix compilation of [src/c4/ext/sg14/inplace_function.h](src/c4/ext/sg14/inplace_function.h) in C++11 ([PR #49](https://github.com/biojppm/c4core/pull/47)).
- Change order of headers, notably in `windows_push.hpp` ([PR #47](https://github.com/biojppm/c4core/pull/47)).
- In `c4/charconv.hpp`: do not use C4_ASSERT in `to_c_fmt()`, which is `constexpr`.
- Fix [#53](https://github.com/biojppm/c4core/issues/53): cmake install targets were missing call to `export()` ([PR #55](https://github.com/biojppm/c4core/pull/55)).
- Fix linking of subprojects with libc++: flags should be forwarded through `CMAKE_***_FLAGS` instead of being set explicitly per-target ([PR #54](https://github.com/biojppm/c4core/pull/54)).
### Thanks
- @cschreib


@@ -0,0 +1,31 @@
### Breaking changes
- fix [#63](https://github.com/biojppm/c4core/issues/63): remove `c4/time.hpp` and `c4/time.cpp` which prevented compilation in bare-metal mode ([PR #64](https://github.com/biojppm/c4core/issues/64)).
### New features
- Added decoding of UTF codepoints: `c4::decode_code_point()` ([PR #65](https://github.com/biojppm/c4core/issues/65)).
- Experimental feature: add formatted-dumping facilities: using semantics like `c4::cat()`, `c4::catsep()` and `c4::format()`, where the subject is not a string buffer but a dump callback accepting strings. This still requires a string buffer for serialization of non-string types, but the buffer's required size is now limited to the max serialized size of non-string arguments, in contrast to the requirement in `c4::cat()` et al which is the total serialized size of every argument. This enables very efficient and generic printf-like semantics with reuse of a single small buffer, and allows direct-printing to terminal or file ([PR #67](https://github.com/biojppm/c4core/issues/67)). This feature is still experimental and a minor amount of changes to the API is possible.
- Added macro `C4_IF_CONSTEXPR` resolving to `if constexpr (...)` if the c++ standard is at least c++17.
- `csubstr`: add `count(csubstr)` overload.
- Add support for RISC-V architectures ([PR #69](https://github.com/biojppm/c4core/issues/69)).
- Add support for bare-metal compilation ([PR #64](https://github.com/biojppm/c4core/issues/64)).
- gcc >= 4.8 support using polyfills for missing templates and features ([PR #74](https://github.com/biojppm/c4core/pull/74) and [PR #68](https://github.com/biojppm/c4core/pull/68)).
### Fixes
- `csubstr::operator==(std::nullptr_t)` now returns true if either `.str==nullptr` or `.len==0`.
- Fix: `bool operator==(const char (&s)[N], csubstr)` and `operator==(const char (&s)[N], substr)`. The template declaration for these functions had an extra `const` which prevented these functions from participating in overload resolution, which in some cases resulted in calls resolving to `operator==(std::string const&, csubstr)` if that header was visible ([PR #64](https://github.com/biojppm/c4core/issues/64)).
- Fix `csubstr::last_not_of()`: optional positional parameter was ignored [PR #62](https://github.com/biojppm/c4core/pull/62).
- `atof()`, `atod()`, `atox()`, `substr::is_real()`, `substr::first_real_span()`: accept `infinity`, `inf` and `nan` as valid reals [PR #60](https://github.com/biojppm/c4core/pull/60).
- Add missing export symbols [PR #56](https://github.com/biojppm/c4core/pull/56), [PR #57](https://github.com/biojppm/c4core/pull/57).
- `c4/substr_fwd.hpp`: fix compilation failure in Xcode 12 and earlier, where the forward declaration for `std::allocator` is inside the `inline namespace __1`, unlike later versions [PR #61](https://github.com/biojppm/c4core/pull/61), reported in [rapidyaml#185](https://github.com/biojppm/rapidyaml/issues/185).
- `c4/error.hpp`: fix compilation failure in debug mode in Xcode 12 and earlier: `__clang_major__` does not mean the same as in the common clang, and as a result the warning `-Wgnu-inline-cpp-without-extern` does not exist there.
### Thanks
- @danngreen
- @Xeonacid
- @aviktorov
- @fargies


@@ -0,0 +1 @@
__pycache__


@@ -0,0 +1,120 @@
# this function works both with multiconfig and single-config generators.
function(set_default_build_type which)
# CMAKE_CONFIGURATION_TYPES is available only for multiconfig generators.
# so set the build type only if CMAKE_CONFIGURATION_TYPES does not exist.
if(NOT CMAKE_CONFIGURATION_TYPES) # not a multiconfig generator?
if(NOT CMAKE_BUILD_TYPE)
if(NOT which)
set(which RelWithDebInfo)
endif()
message("Defaulting to ${which} build.")
set(CMAKE_BUILD_TYPE ${which} CACHE STRING "")
endif()
endif()
endfunction()
# https://stackoverflow.com/questions/31546278/where-to-set-cmake-configuration-types-in-a-project-with-subprojects
function(setup_configuration_types)
set(options0arg
)
set(options1arg
DEFAULT
)
set(optionsnarg
TYPES
)
cmake_parse_arguments("" "${options0arg}" "${options1arg}" "${optionsnarg}" ${ARGN})
if(NOT TYPES)
set(TYPES Release Debug RelWithDebInfo MinSizeRel)
endif()
# make it safe to call repeatedly
if(NOT _setup_configuration_types_done)
set(_setup_configuration_types_done 1 CACHE INTERNAL "")
# No reason to set CMAKE_CONFIGURATION_TYPES if it's not a multiconfig generator
# Also no reason to mess with CMAKE_BUILD_TYPE if it's a multiconfig generator.
if(CMAKE_CONFIGURATION_TYPES) # multiconfig generator?
set(CMAKE_CONFIGURATION_TYPES "${TYPES}" CACHE STRING "")
else() # single-config generator
set_property(CACHE CMAKE_BUILD_TYPE PROPERTY HELPSTRING "Choose the type of build")
set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "${TYPES}")
# set the valid options for cmake-gui drop-down list
endif()
endif()
endfunction()
# https://stackoverflow.com/questions/31546278/where-to-set-cmake-configuration-types-in-a-project-with-subprojects
function(add_configuration_type name)
set(flag_vars
C_FLAGS
CXX_FLAGS
SHARED_LINKER_FLAGS
STATIC_LINKER_FLAGS
MODULE_LINKER_FLAGS
EXE_LINKER_FLAGS
RC_FLAGS
)
set(options0arg
PREPEND # when defaulting to a config, prepend to it instead of appending to it
SET_MAIN_FLAGS # eg, set CMAKE_CXX_FLAGS from CMAKE_CXX_FLAGS_${name}
)
set(options1arg
DEFAULT_FROM # take the initial value of the flags from this config
)
set(optionsnarg
C_FLAGS
CXX_FLAGS
SHARED_LINKER_FLAGS
STATIC_LINKER_FLAGS
MODULE_LINKER_FLAGS
EXE_LINKER_FLAGS
RC_FLAGS
)
cmake_parse_arguments(_act "${options0arg}" "${options1arg}" "${optionsnarg}" ${ARGN})
string(TOUPPER ${name} UNAME)
# make it safe to call repeatedly
if(NOT _add_configuration_type_${name})
set(_add_configuration_type_${name} 1 CACHE INTERNAL "")
setup_configuration_types()
if(CMAKE_CONFIGURATION_TYPES) # multiconfig generator?
set(CMAKE_CONFIGURATION_TYPES "${CMAKE_CONFIGURATION_TYPES};${name}" CACHE STRING "" FORCE)
else() # single-config generator
set_property(CACHE CMAKE_BUILD_TYPE PROPERTY HELPSTRING "Choose the type of build" FORCE)
set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS "${CMAKE_BUILD_TYPES};${name}" FORCE)
# set the valid options for cmake-gui drop-down list
endif()
# now set up the configuration
message(STATUS "config: CMAKE_${f}_${UNAME} --- ${val}")
foreach(f ${flag_vars})
set(val ${_act_${f}})
message(STATUS "config: ${name}: ${f} --- ${val}")
if(_act_DEFAULT_FROM)
if(_act_PREPEND)
set(val "${val} ${CMAKE_${f}_${_act_DEFAULT_FROM}}")
else()
set(val "${CMAKE_${f}_${_act_DEFAULT_FROM}} ${val}")
endif()
endif()
message(STATUS "config: CMAKE_${f}_${UNAME} --- ${val}")
set(CMAKE_${f}_${UNAME} "${val}" CACHE STRING "" FORCE)
mark_as_advanced(CMAKE_${f}_${UNAME})
if(_act_SET_MAIN_FLAGS)
set(CMAKE_${f} "${CMAKE_${f}_${UNAME}}" CACHE STRING "" FORCE)
endif()
endforeach()
endif()
endfunction()


@@ -0,0 +1,30 @@
# create hierarchical source groups based on a dir tree
#
# EXAMPLE USAGE:
#
# create_source_group("src" "${SRC_ROOT}" "${SRC_LIST}")
#
# Visual Studio usually has the equivalent to this:
#
# create_source_group("Header Files" ${PROJ_SRC_DIR} "${PROJ_HEADERS}")
# create_source_group("Source Files" ${PROJ_SRC_DIR} "${PROJ_SOURCES}")
#
# TODO: <jpmag> this was taken from a stack overflow answer. Need to find it
# and add a link here.
macro(create_source_group GroupPrefix RootDir ProjectSources)
set(DirSources ${ProjectSources})
foreach(Source ${DirSources})
#message(STATUS "s=${Source}")
string(REGEX REPLACE "${RootDir}" "" RelativePath "${Source}")
#message(STATUS " ${RelativePath}")
string(REGEX REPLACE "[\\\\/][^\\\\/]*$" "" RelativePath "${RelativePath}")
#message(STATUS " ${RelativePath}")
string(REGEX REPLACE "^[\\\\/]" "" RelativePath "${RelativePath}")
#message(STATUS " ${RelativePath}")
string(REGEX REPLACE "/" "\\\\\\\\" RelativePath "${RelativePath}")
#message(STATUS " ${RelativePath}")
source_group("${GroupPrefix}\\${RelativePath}" FILES ${Source})
#message(STATUS " ${Source}")
endforeach(Source)
endmacro(create_source_group)
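#
# A minimal sketch of typical usage (the glob, target and group names here are
# hypothetical):
#
#   file(GLOB_RECURSE MYPROJ_SOURCES "${CMAKE_CURRENT_SOURCE_DIR}/src/*.cpp")
#   add_executable(myproj ${MYPROJ_SOURCES})
#   create_source_group("Source Files" "${CMAKE_CURRENT_SOURCE_DIR}/src" "${MYPROJ_SOURCES}")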

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,215 @@
# (C) 2017 Joao Paulo Magalhaes <dev@jpmag.me>
include(CMakeParseArguments)
#------------------------------------------------------------------------------
# Usage:
#
# ExternalProject_GetFwdArgs(output_var
# [NO_DEFAULTS]
# [VARS var1 var2 ...]
# [EXCLUDE xvar1 xvar2 ...]
# [QUIET]
# )
#
# Get the current cmake environment in a sequence of -DVAR=${VAR}
# tokens so that the environment can be forwarded to an external
# cmake project through CMAKE_ARGS.
#
# Example:
# ExternalProject_GetFwdArgs(FWD_ARGS)
# ExternalProject_Add(foo SOURCE_DIR ../foo
# CMAKE_ARGS ${FWD_ARGS}
# ... etc)
#
# Use this function to enable forwarding the current cmake environment
# to an external project. It outputs all the needed variables in the
# form of a sequence of -DVAR=value, suitable for use in the CMAKE_ARGS
# clause of ExternalProject_Add().
#
# This function uses ExternalProject_GetFwdVarNames() to find out the
# list of variables to export. If this behaviour does not fit your
# needs you can:
#
# * append more of your own variables (using the VARS
# argument). The vars specified in this option will each be
# added to the output in the form of -Dvar=${var}
#
# * you can also avoid any defaults obtained through usage of
# ExternalProject_GetFwdVarNames() by specifying NO_DEFAULTS.
#
# Example with custom variable names (adding more):
# ExternalProject_GetFwdArgs(FWD_ARGS VARS USER_VAR1 USER_VAR2)
# ExternalProject_Add(foo SOURCE_DIR ../foo CMAKE_ARGS ${FWD_ARGS})
#
# Example with custom variable names (just your own):
# ExternalProject_GetFwdArgs(FWD_ARGS NO_DEFAULTS VARS USER_VAR1 USER_VAR2)
# ExternalProject_Add(foo SOURCE_DIR ../foo CMAKE_ARGS ${FWD_ARGS})
#
function(ExternalProject_GetFwdArgs output_var)
set(options0arg
NO_DEFAULTS
QUIET
)
set(options1arg
)
set(optionsnarg
VARS
EXCLUDE
)
cmake_parse_arguments(_epgfa "${options0arg}" "${options1arg}" "${optionsnarg}" ${ARGN})
if(NOT _epgfa_NO_DEFAULTS)
ExternalProject_GetFwdVarNames(_fwd_names)
endif()
if(_epgfa_VARS)
list(APPEND _fwd_names ${_epgfa_VARS})
endif()
if(_epgfa_EXCLUDE)
list(REMOVE_ITEM _fwd_names ${_epgfa_EXCLUDE})
endif()
set(_epgfa_args)
foreach(_f ${_fwd_names})
if(${_f})
list(APPEND _epgfa_args -D${_f}=${${_f}})
if(NOT _epgfa_QUIET)
message(STATUS "ExternalProject_GetFwdArgs: ${_f}=${${_f}}")
endif()
endif()
endforeach()
set(${output_var} "${_epgfa_args}" PARENT_SCOPE)
endfunction(ExternalProject_GetFwdArgs)
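# A minimal sketch combining the options above (the external project location,
# the excluded variable and the extra forwarded variable are hypothetical):
#
#   include(ExternalProject)
#   ExternalProject_GetFwdArgs(fwd_args
#       EXCLUDE CMAKE_INSTALL_PREFIX  # let the subproject keep its own prefix
#       VARS MYPROJ_SANITIZE          # forward an extra cache variable
#       QUIET)
#   ExternalProject_Add(extlib
#       SOURCE_DIR "${CMAKE_CURRENT_LIST_DIR}/ext/extlib"
#       CMAKE_ARGS ${fwd_args})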
#------------------------------------------------------------------------------
# Gets a default list with the names of variables to forward to an
# external project. This function creates a list of common cmake
# variable names which have an impact on the output binaries or their
# placement.
function(ExternalProject_GetFwdVarNames output_var)
# these common names are irrespective of build type
set(names
CMAKE_GENERATOR
CMAKE_INSTALL_PREFIX
CMAKE_ARCHIVE_OUTPUT_DIRECTORY
CMAKE_LIBRARY_OUTPUT_DIRECTORY
CMAKE_RUNTIME_OUTPUT_DIRECTORY
CMAKE_AR
CMAKE_BUILD_TYPE
CMAKE_INCLUDE_PATH
CMAKE_LIBRARY_PATH
#CMAKE_MODULE_PATH # this is dangerous as it can override the external project's build files.
CMAKE_PREFIX_PATH
BUILD_SHARED_LIBS
CMAKE_CXX_COMPILER
CMAKE_C_COMPILER
CMAKE_LINKER
CMAKE_MAKE_PROGRAM
CMAKE_NM
CMAKE_OBJCOPY
CMAKE_RANLIB
CMAKE_STRIP
CMAKE_TOOLCHAIN_FILE
#CMAKE_CONFIGURATION_TYPES # not this. external projects will have their own build configurations
)
# these names have per-build type values;
# use CMAKE_CONFIGURATION_TYPES to construct the list
foreach(v
CMAKE_CXX_FLAGS
CMAKE_C_FLAGS
CMAKE_EXE_LINKER_FLAGS
CMAKE_MODULE_LINKER_FLAGS
CMAKE_SHARED_LINKER_FLAGS)
list(APPEND names ${v})
foreach(t ${CMAKE_CONFIGURATION_TYPES})
string(TOUPPER ${t} u)
list(APPEND names ${v}_${u})
endforeach()
endforeach()
set(${output_var} "${names}" PARENT_SCOPE)
endfunction(ExternalProject_GetFwdVarNames)
#------------------------------------------------------------------------------
macro(ExternalProject_Import name)
set(options0arg
)
set(options1arg
PREFIX # look only here when finding
)
set(optionsnarg
INCLUDE_PATHS # use these dirs for searching includes
LIBRARY_PATHS # use these dirs for searching libraries
INCLUDES # find these includes and append them to ${name}_INCLUDE_DIRS
INCLUDE_DIR_SUFFIXES
LIBRARIES # find these libs and append them to ${name}_LIBRARIES
LIBRARY_DIR_SUFFIXES
)
cmake_parse_arguments(_eep "${options0arg}" "${options1arg}" "${optionsnarg}" ${ARGN})
if(NOT _eep_PREFIX)
message(FATAL_ERROR "no prefix was given")
endif()
include(FindPackageHandleStandardArgs)
#----------------------------------------------------------------
# includes
# the list of paths to search for includes
set(_eep_ipaths ${_eep_PREFIX})
foreach(_eep_i ${_eep_INCLUDE_PATHS})
list(APPEND _eep_ipaths ${_eep_PREFIX}/${_eep_i})
endforeach()
# find the includes that were asked for, and add
# their paths to the includes list
set(_eep_idirs)
foreach(_eep_i ${_eep_INCLUDES})
find_path(_eep_path_${_eep_i} ${_eep_i}
PATHS ${_eep_ipaths}
PATH_SUFFIXES include ${_eep_INCLUDE_DIR_SUFFIXES}
NO_DEFAULT_PATH
)
if(NOT _eep_path_${_eep_i})
message(FATAL_ERROR "could not find include: ${_eep_i}")
endif()
#message(STATUS "include: ${_eep_i} ---> ${_eep_path_${_eep_i}}")
list(APPEND _eep_idirs ${_eep_path_${_eep_i}})
find_package_handle_standard_args(${_eep_i}_INCLUDE_DIR DEFAULT_MSG _eep_path_${_eep_i})
endforeach()
if(_eep_idirs)
list(REMOVE_DUPLICATES _eep_idirs)
endif()
# save the include list
set(${name}_INCLUDE_DIRS "${_eep_idirs}" CACHE STRING "" FORCE)
#----------------------------------------------------------------
# libraries
# the list of paths to search for libraries
set(_eep_lpaths ${_eep_PREFIX})
foreach(_eep_i ${_eep_LIBRARY_PATHS})
list(APPEND _eep_lpaths ${_eep_PREFIX}/${_eep_i})
endforeach()
# find any libraries that were asked for
set(_eep_libs)
foreach(_eep_i ${_eep_LIBRARIES})
find_library(_eep_lib_${_eep_i} ${_eep_i}
PATHS ${_eep_lpaths}
PATH_SUFFIXES lib ${_eep_LIBRARY_DIR_SUFFIXES}
NO_DEFAULT_PATH
)
if(NOT _eep_lib_${_eep_i})
message(FATAL_ERROR "could not find library: ${_eep_i}")
endif()
#message(STATUS "lib: ${_eep_i} ---> ${_eep_lib_${_eep_i}}")
list(APPEND _eep_libs ${_eep_lib_${_eep_i}})
find_package_handle_standard_args(${_eep_i}_LIBRARY DEFAULT_MSG _eep_lib_${_eep_i})
endforeach()
# save the library list
set(${name}_LIBRARIES ${_eep_libs} CACHE STRING "")
endmacro(ExternalProject_Import)
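#
# A minimal usage sketch (the prefix, header and library names below are
# hypothetical):
#
#   ExternalProject_Import(zlib
#       PREFIX "${CMAKE_BINARY_DIR}/ext/zlib-install"
#       INCLUDES zlib.h
#       LIBRARIES z)
#
# ... which, if everything is found, populates zlib_INCLUDE_DIRS and
# zlib_LIBRARIES in the cache.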

View File

@@ -0,0 +1,75 @@
# Find the win10 SDK path.
if ("$ENV{WIN10_SDK_PATH}$ENV{WIN10_SDK_VERSION}" STREQUAL "" )
get_filename_component(WIN10_SDK_PATH "[HKEY_LOCAL_MACHINE\\SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v10.0;InstallationFolder]" ABSOLUTE CACHE)
get_filename_component(TEMP_WIN10_SDK_VERSION "[HKEY_LOCAL_MACHINE\\SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v10.0;ProductVersion]" ABSOLUTE CACHE)
get_filename_component(WIN10_SDK_VERSION ${TEMP_WIN10_SDK_VERSION} NAME)
elseif(TRUE)
set (WIN10_SDK_PATH $ENV{WIN10_SDK_PATH})
set (WIN10_SDK_VERSION $ENV{WIN10_SDK_VERSION})
endif ("$ENV{WIN10_SDK_PATH}$ENV{WIN10_SDK_VERSION}" STREQUAL "" )
# WIN10_SDK_PATH will be something like C:\Program Files (x86)\Windows Kits\10
# WIN10_SDK_VERSION will be something like 10.0.14393 or 10.0.14393.0; we need the
# one that matches the directory name.
if (IS_DIRECTORY "${WIN10_SDK_PATH}/Include/${WIN10_SDK_VERSION}.0")
set(WIN10_SDK_VERSION "${WIN10_SDK_VERSION}.0")
endif (IS_DIRECTORY "${WIN10_SDK_PATH}/Include/${WIN10_SDK_VERSION}.0")
# Find the d3d12 and dxgi include path, it will typically look something like this.
# C:\Program Files (x86)\Windows Kits\10\Include\10.0.10586.0\um\d3d12.h
# C:\Program Files (x86)\Windows Kits\10\Include\10.0.10586.0\shared\dxgi1_4.h
find_path(D3D12_INCLUDE_DIR # Set variable D3D12_INCLUDE_DIR
d3d12.h # Find a path with d3d12.h
HINTS "${WIN10_SDK_PATH}/Include/${WIN10_SDK_VERSION}/um"
DOC "path to WIN10 SDK header files"
)
find_path(DXGI_INCLUDE_DIR # Set variable DXGI_INCLUDE_DIR
dxgi1_4.h # Find a path with dxgi1_4.h
HINTS "${WIN10_SDK_PATH}/Include/${WIN10_SDK_VERSION}/shared"
DOC "path to WIN10 SDK header files"
)
if ("${DXC_BUILD_ARCH}" STREQUAL "x64" )
find_library(D3D12_LIBRARY NAMES d3d12.lib
HINTS ${WIN10_SDK_PATH}/Lib/${WIN10_SDK_VERSION}/um/x64 )
elseif (CMAKE_GENERATOR MATCHES "Visual Studio.*ARM" OR "${DXC_BUILD_ARCH}" STREQUAL "ARM")
find_library(D3D12_LIBRARY NAMES d3d12.lib
HINTS ${WIN10_SDK_PATH}/Lib/${WIN10_SDK_VERSION}/um/arm )
elseif (CMAKE_GENERATOR MATCHES "Visual Studio.*ARM64" OR "${DXC_BUILD_ARCH}" STREQUAL "ARM64")
find_library(D3D12_LIBRARY NAMES d3d12.lib
HINTS ${WIN10_SDK_PATH}/Lib/${WIN10_SDK_VERSION}/um/arm64 )
elseif ("${DXC_BUILD_ARCH}" STREQUAL "Win32" )
find_library(D3D12_LIBRARY NAMES d3d12.lib
HINTS ${WIN10_SDK_PATH}/Lib/${WIN10_SDK_VERSION}/um/x86 )
endif ("${DXC_BUILD_ARCH}" STREQUAL "x64" )
if ("${DXC_BUILD_ARCH}" STREQUAL "x64" )
find_library(DXGI_LIBRARY NAMES dxgi.lib
HINTS ${WIN10_SDK_PATH}/Lib/${WIN10_SDK_VERSION}/um/x64 )
elseif (CMAKE_GENERATOR MATCHES "Visual Studio.*ARM" OR "${DXC_BUILD_ARCH}" STREQUAL "ARM")
find_library(DXGI_LIBRARY NAMES dxgi.lib
HINTS ${WIN10_SDK_PATH}/Lib/${WIN10_SDK_VERSION}/um/arm )
elseif (CMAKE_GENERATOR MATCHES "Visual Studio.*ARM64" OR "${DXC_BUILD_ARCH}" STREQUAL "ARM64")
find_library(DXGI_LIBRARY NAMES dxgi.lib
HINTS ${WIN10_SDK_PATH}/Lib/${WIN10_SDK_VERSION}/um/arm64 )
elseif ("${DXC_BUILD_ARCH}" STREQUAL "Win32" )
find_library(DXGI_LIBRARY NAMES dxgi.lib
HINTS ${WIN10_SDK_PATH}/Lib/${WIN10_SDK_VERSION}/um/x86 )
endif ("${DXC_BUILD_ARCH}" STREQUAL "x64" )
set(D3D12_LIBRARIES ${D3D12_LIBRARY} ${DXGI_LIBRARY})
set(D3D12_INCLUDE_DIRS ${D3D12_INCLUDE_DIR} ${DXGI_INCLUDE_DIR})
include(FindPackageHandleStandardArgs)
# handle the QUIETLY and REQUIRED arguments and set D3D12_FOUND to TRUE
# if all listed variables are TRUE
find_package_handle_standard_args(D3D12 DEFAULT_MSG
D3D12_INCLUDE_DIRS D3D12_LIBRARIES)
mark_as_advanced(D3D12_INCLUDE_DIRS D3D12_LIBRARIES)
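# Usage sketch, assuming this module is saved as FindD3D12.cmake somewhere on
# CMAKE_MODULE_PATH (the file name and the consuming target are assumptions,
# not shown here):
#
#   list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}/cmake")
#   find_package(D3D12 REQUIRED)
#   target_include_directories(myapp PRIVATE ${D3D12_INCLUDE_DIRS})
#   target_link_libraries(myapp PRIVATE ${D3D12_LIBRARIES})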

View File

@@ -0,0 +1,76 @@
# Attempt to find the D3D12 libraries
# Defines:
#
# DX12_FOUND - system has DX12
# DX12_INCLUDE_PATH - path to the DX12 headers
# DX12_LIBRARIES - path to the DX12 libraries
# DX12_LIB - d3d12.lib
set(DX12_FOUND "NO")
if(WIN32)
set(WIN10_SDK_DIR "C:/Program Files (x86)/Windows Kits/10")
#set(WIN10_SDK_VERSION "10.0.10069.0")
file(GLOB WIN10_SDK_VERSIONS
LIST_DIRECTORIES TRUE
RELATIVE "${WIN10_SDK_DIR}/Lib"
"${WIN10_SDK_DIR}/Lib/*")
list(SORT WIN10_SDK_VERSIONS)
list(GET WIN10_SDK_VERSIONS -1 WIN10_SDK_VERSION)
if(CMAKE_CL_64)
set(w10ARCH x64)
elseif(CMAKE_GENERATOR MATCHES "Visual Studio.*ARM" OR "${DXC_BUILD_ARCH}" STREQUAL "ARM")
set(w10ARCH arm)
elseif(CMAKE_GENERATOR MATCHES "Visual Studio.*ARM64" OR "${DXC_BUILD_ARCH}" STREQUAL "ARM64")
set(w10ARCH arm64)
else()
set(w10ARCH x86)
endif()
# Look for the Windows 10 SDK
find_path(DX12_INC_DIR
NAMES d3d12.h
PATHS "${WIN10_SDK_DIR}/Include/${WIN10_SDK_VERSION}/um"
DOC "Path to the d3d12.h file"
)
find_path(DXGI_INC_DIR
NAMES dxgi1_4.h
PATHS "${WIN10_SDK_DIR}/Include/${WIN10_SDK_VERSION}/shared"
DOC "Path to the dxgi header file"
)
if(DX12_INC_DIR AND DXGI_INC_DIR)
find_library(DX12_LIB
NAMES d3d12
PATHS "${WIN10_SDK_DIR}/Lib/${WIN10_SDK_VERSION}/um/${w10ARCH}"
NO_DEFAULT_PATH
DOC "Path to the d3d12.lib file"
)
find_library(DXGI_LIB
NAMES dxgi
PATHS "${WIN10_SDK_DIR}/Lib/${WIN10_SDK_VERSION}/um/${w10ARCH}"
NO_DEFAULT_PATH
DOC "Path to the dxgi.lib file"
)
if(DX12_LIB AND DXGI_LIB)
set(DX12_FOUND "YES")
set(DX12_LIBRARIES ${DX12_LIB} ${DXGI_LIB})
mark_as_advanced(DX12_INC_DIR DX12_LIB)
mark_as_advanced(DXGI_INC_DIR DXGI_LIB)
endif()
endif()
endif(WIN32)
if(DX12_FOUND)
if(NOT DX12_FIND_QUIETLY)
message(STATUS "DX12 headers found at ${DX12_INC_DIR}")
endif()
else()
if(DX12_FIND_REQUIRED)
message(FATAL_ERROR "Could NOT find Direct3D12")
endif()
if(NOT DX12_FIND_QUIETLY)
message(STATUS "Could NOT find Direct3D12")
endif()
endif()

View File

@@ -0,0 +1,53 @@
function(_c4_intersperse_with_flag outvar flag)
if(MSVC AND "${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC") # it may be clang as well
set(f "/${flag}")
else()
set(f "-${flag}")
endif()
set(out)
foreach(i ${ARGN})
if(NOT "${i}" STREQUAL "")
set(out "${out} ${f} '${i}'")
# ... Following this are several unsuccessful attempts to make
# sure that an empty generator expression passed as part of the
# arguments won't be expanded to nothing between successive
# flags. For example, -I /some/include -I -I /other/include,
# which is wrong as it misses an empty quote. This causes
# clang-tidy in particular to fail. Maybe this is happening
# because the result is passed to separate_arguments() which
# prevents the lists from being evaluated correctly. Also, note
# that add_custom_target() has the following options which may
# help: COMMAND_EXPAND_LISTS and VERBATIM.
# Anyway -- for now this is working, but the generator expression
# approach may eventually be made to work, and it would be much
# cleaner than the current approach.
#set(c $<GENEX_EVAL,$<BOOL:${i}>>)
#set(c $<BOOL:${i}>) # i may be a generator expression that evaluates to empty
#set(s "${f} ${i}")
#set(e "${f} aaaaaaWTF")
#list(APPEND out $<IF:${c},${s},${e}>)
#list(APPEND out $<${c},${s}>)
#list(APPEND out $<GENEX_EVAL:${c},${s}>)
#list(APPEND out $<TARGET_GENEX_EVAL:${tgt},${c},${s}>)
endif()
endforeach()
## https://cmake.org/cmake/help/latest/manual/cmake-generator-expressions.7.html#string-valued-generator-expressions
#if(ARGN)
# set(out "${f}$<JOIN:${ARGN},;${f}>")
#endif()
set(${outvar} ${out} PARENT_SCOPE)
endfunction()
function(c4_get_define_flags outvar)
_c4_intersperse_with_flag(out D ${ARGN})
set(${outvar} ${out} PARENT_SCOPE)
endfunction()
function(c4_get_include_flags outvar)
_c4_intersperse_with_flag(out I ${ARGN})
set(${outvar} ${out} PARENT_SCOPE)
endfunction()
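#
# A minimal sketch of how these are meant to be used (the defines and include
# dirs are hypothetical):
#
#   c4_get_define_flags(defflags MYLIB_DEBUG=1 MYLIB_VERSION=2)
#   c4_get_include_flags(incflags "${CMAKE_CURRENT_SOURCE_DIR}/src" "${CMAKE_BINARY_DIR}/gen")
#
# defflags then holds something like -D 'MYLIB_DEBUG=1' -D 'MYLIB_VERSION=2',
# and incflags -I '<src dir>' -I '<gen dir>' (or /D ... /I ... with MSVC).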

View File

@@ -0,0 +1,51 @@
function(get_lib_names lib_names base)
set(${lib_names})
foreach(__glnname ${ARGN})
if(WIN32)
set(__glnn ${__glnname}.lib)
else()
set(__glnn lib${__glnname}.a)
endif()
list(APPEND ${lib_names} "${base}${__glnn}")
endforeach()
set(${lib_names} ${${lib_names}} PARENT_SCOPE)
endfunction()
function(get_dll_names dll_names base)
set(${dll_names})
foreach(__glnname ${ARGN})
if(WIN32)
set(__glnn ${__glnname}.dll)
else()
set(__glnn lib${__glnname}.so)
endif()
list(APPEND ${dll_names} "${base}${__glnn}")
endforeach()
set(${dll_names} ${${dll_names}} PARENT_SCOPE)
endfunction()
function(get_script_names script_names base)
set(${script_names})
foreach(__glnname ${ARGN})
if(WIN32)
set(__glnn ${__glnname}.bat)
else()
set(__glnn ${__glnname}.sh)
endif()
list(APPEND ${script_names} "${base}${__glnn}")
endforeach()
set(${script_names} ${${script_names}} PARENT_SCOPE)
endfunction()
function(get_exe_names exe_names base)
set(${exe_names})
foreach(__glnname ${ARGN})
if(WIN32)
set(__glnn ${__glnname}.exe)
else()
set(__glnn ${__glnname})
endif()
list(APPEND ${exe_names} "${base}${__glnn}")
endforeach()
set(${exe_names} ${${exe_names}} PARENT_SCOPE)
endfunction()
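#
# A minimal usage sketch (the base dir and library names are hypothetical):
#
#   get_lib_names(mylibs "${CMAKE_BINARY_DIR}/lib/" foo bar)
#   # on windows:  .../lib/foo.lib;.../lib/bar.lib
#   # elsewhere:   .../lib/libfoo.a;.../lib/libbar.a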

View File

@@ -0,0 +1,20 @@
Copyright (c) 2018, Joao Paulo Magalhaes <dev@jpmag.me>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@@ -0,0 +1,275 @@
# 2006-2008 (c) Viva64.com Team
# 2008-2016 (c) OOO "Program Verification Systems"
#
# Version 2
function (pvs_studio_relative_path VAR ROOT FILEPATH)
set("${VAR}" "${FILEPATH}" PARENT_SCOPE)
if ("${FILEPATH}" MATCHES "^/.*$")
file(RELATIVE_PATH RPATH "${ROOT}" "${FILEPATH}")
if (NOT "${RPATH}" MATCHES "^\\.\\..*$")
set("${VAR}" "${RPATH}" PARENT_SCOPE)
endif ()
endif ()
endfunction ()
function (pvs_studio_join_path VAR DIR1 DIR2)
if ("${DIR2}" MATCHES "^(/|~).*$" OR "${DIR1}" STREQUAL "")
set("${VAR}" "${DIR2}" PARENT_SCOPE)
else ()
set("${VAR}" "${DIR1}/${DIR2}" PARENT_SCOPE)
endif ()
endfunction ()
macro (pvs_studio_append_flags_from_property CXX C DIR PREFIX)
if (NOT "${PROPERTY}" STREQUAL "NOTFOUND" AND NOT "${PROPERTY}" STREQUAL "PROPERTY-NOTFOUND")
foreach (PROP ${PROPERTY})
pvs_studio_join_path(PROP "${DIR}" "${PROP}")
list(APPEND "${CXX}" "${PREFIX}${PROP}")
list(APPEND "${C}" "${PREFIX}${PROP}")
endforeach ()
endif ()
endmacro ()
macro (pvs_studio_append_standard_flag FLAGS STANDARD)
if ("${STANDARD}" MATCHES "^(99|11|14|17)$")
if ("${PVS_STUDIO_PREPROCESSOR}" MATCHES "gcc|clang")
list(APPEND "${FLAGS}" "-std=c++${STANDARD}")
endif ()
endif ()
endmacro ()
function (pvs_studio_set_directory_flags DIRECTORY CXX C)
set(CXX_FLAGS "${${CXX}}")
set(C_FLAGS "${${C}}")
get_directory_property(PROPERTY DIRECTORY "${DIRECTORY}" INCLUDE_DIRECTORIES)
pvs_studio_append_flags_from_property(CXX_FLAGS C_FLAGS "${DIRECTORY}" "-I")
get_directory_property(PROPERTY DIRECTORY "${DIRECTORY}" COMPILE_DEFINITIONS)
pvs_studio_append_flags_from_property(CXX_FLAGS C_FLAGS "" "-D")
set("${CXX}" "${CXX_FLAGS}" PARENT_SCOPE)
set("${C}" "${C_FLAGS}" PARENT_SCOPE)
endfunction ()
function (pvs_studio_set_target_flags TARGET CXX C)
set(CXX_FLAGS "${${CXX}}")
set(C_FLAGS "${${C}}")
get_target_property(PROPERTY "${TARGET}" INCLUDE_DIRECTORIES)
pvs_studio_append_flags_from_property(CXX_FLAGS C_FLAGS "${DIRECTORY}" "-I")
get_target_property(PROPERTY "${TARGET}" COMPILE_DEFINITIONS)
pvs_studio_append_flags_from_property(CXX_FLAGS C_FLAGS "" "-D")
get_target_property(PROPERTY "${TARGET}" CXX_STANDARD)
pvs_studio_append_standard_flag(CXX_FLAGS "${PROPERTY}")
set("${CXX}" "${CXX_FLAGS}" PARENT_SCOPE)
set("${C}" "${C_FLAGS}" PARENT_SCOPE)
endfunction ()
function (pvs_studio_set_source_file_flags SOURCE)
set(LANGUAGE "")
string(TOLOWER "${SOURCE}" SOURCE_LOWER)
if ("${LANGUAGE}" STREQUAL "" AND "${SOURCE_LOWER}" MATCHES "^.*\\.(c|cpp|cc|cx|cxx|cp|c\\+\\+)$")
if ("${SOURCE}" MATCHES "^.*\\.c$")
set(LANGUAGE C)
else ()
set(LANGUAGE CXX)
endif ()
endif ()
if ("${LANGUAGE}" STREQUAL "C")
set(CL_PARAMS ${PVS_STUDIO_C_FLAGS} ${PVS_STUDIO_TARGET_C_FLAGS} -DPVS_STUDIO)
elseif ("${LANGUAGE}" STREQUAL "CXX")
set(CL_PARAMS ${PVS_STUDIO_CXX_FLAGS} ${PVS_STUDIO_TARGET_CXX_FLAGS} -DPVS_STUDIO)
endif ()
set(PVS_STUDIO_LANGUAGE "${LANGUAGE}" PARENT_SCOPE)
set(PVS_STUDIO_CL_PARAMS "${CL_PARAMS}" PARENT_SCOPE)
endfunction ()
function (pvs_studio_analyze_file SOURCE SOURCE_DIR BINARY_DIR)
set(PLOGS ${PVS_STUDIO_PLOGS})
pvs_studio_set_source_file_flags("${SOURCE}")
get_filename_component(SOURCE "${SOURCE}" REALPATH)
pvs_studio_relative_path(SOURCE_RELATIVE "${SOURCE_DIR}" "${SOURCE}")
pvs_studio_join_path(SOURCE "${SOURCE_DIR}" "${SOURCE}")
set(LOG "${BINARY_DIR}/PVS-Studio/${SOURCE_RELATIVE}.plog")
get_filename_component(LOG "${LOG}" REALPATH)
get_filename_component(PARENT_DIR "${LOG}" DIRECTORY)
if (EXISTS "${SOURCE}" AND NOT TARGET "${LOG}" AND NOT "${PVS_STUDIO_LANGUAGE}" STREQUAL "")
add_custom_command(OUTPUT "${LOG}"
COMMAND mkdir -p "${PARENT_DIR}"
COMMAND rm -f "${LOG}"
COMMAND "${PVS_STUDIO_BIN}" analyze
--output-file "${LOG}"
--source-file "${SOURCE}"
${PVS_STUDIO_ARGS}
--cl-params ${PVS_STUDIO_CL_PARAMS} "${SOURCE}"
WORKING_DIRECTORY "${BINARY_DIR}"
DEPENDS "${SOURCE}" "${PVS_STUDIO_CONFIG}"
VERBATIM
COMMENT "Analyzing ${PVS_STUDIO_LANGUAGE} file ${SOURCE_RELATIVE}")
list(APPEND PLOGS "${LOG}")
endif ()
set(PVS_STUDIO_PLOGS "${PLOGS}" PARENT_SCOPE)
endfunction ()
function (pvs_studio_analyze_target TARGET DIR)
set(PVS_STUDIO_PLOGS "${PVS_STUDIO_PLOGS}")
set(PVS_STUDIO_TARGET_CXX_FLAGS "")
set(PVS_STUDIO_TARGET_C_FLAGS "")
get_target_property(PROPERTY "${TARGET}" SOURCES)
pvs_studio_relative_path(BINARY_DIR "${CMAKE_SOURCE_DIR}" "${DIR}")
if ("${BINARY_DIR}" MATCHES "^/.*$")
pvs_studio_join_path(BINARY_DIR "${CMAKE_BINARY_DIR}" "PVS-Studio/__${BINARY_DIR}")
else ()
pvs_studio_join_path(BINARY_DIR "${CMAKE_BINARY_DIR}" "${BINARY_DIR}")
endif ()
file(MAKE_DIRECTORY "${BINARY_DIR}")
pvs_studio_set_directory_flags("${DIR}" PVS_STUDIO_TARGET_CXX_FLAGS PVS_STUDIO_TARGET_C_FLAGS)
pvs_studio_set_target_flags("${TARGET}" PVS_STUDIO_TARGET_CXX_FLAGS PVS_STUDIO_TARGET_C_FLAGS)
if (NOT "${PROPERTY}" STREQUAL "NOTFOUND" AND NOT "${PROPERTY}" STREQUAL "PROPERTY-NOTFOUND")
foreach (SOURCE ${PROPERTY})
pvs_studio_join_path(SOURCE "${DIR}" "${SOURCE}")
pvs_studio_analyze_file("${SOURCE}" "${DIR}" "${BINARY_DIR}")
endforeach ()
endif ()
set(PVS_STUDIO_PLOGS "${PVS_STUDIO_PLOGS}" PARENT_SCOPE)
endfunction ()
function (pvs_studio_add_target)
macro (default VAR VALUE)
if ("${${VAR}}" STREQUAL "")
set("${VAR}" "${VALUE}")
endif ()
endmacro ()
set(PVS_STUDIO_SUPPORTED_PREPROCESSORS "gcc|clang")
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
set(DEFAULT_PREPROCESSOR "clang")
else ()
set(DEFAULT_PREPROCESSOR "gcc")
endif ()
set(OPTIONAL OUTPUT ALL)
set(SINGLE LICENSE CONFIG TARGET LOG FORMAT BIN CONVERTER PLATFORM PREPROCESSOR CFG_TEXT)
set(MULTI SOURCES C_FLAGS CXX_FLAGS ARGS DEPENDS ANALYZE)
cmake_parse_arguments(PVS_STUDIO "${OPTIONAL}" "${SINGLE}" "${MULTI}" ${ARGN})
if ("${PVS_STUDIO_CFG}" STREQUAL "" OR NOT "${PVS_STUDIO_CFG_TEXT}" STREQUAL "")
set(PVS_STUDIO_EMPTY_CONFIG ON)
else ()
set(PVS_STUDIO_EMPTY_CONFIG OFF)
endif ()
default(PVS_STUDIO_CFG_TEXT "analysis-mode=4")
default(PVS_STUDIO_CONFIG "${CMAKE_BINARY_DIR}/PVS-Studio.cfg")
default(PVS_STUDIO_C_FLAGS "")
default(PVS_STUDIO_CXX_FLAGS "")
default(PVS_STUDIO_TARGET "pvs")
default(PVS_STUDIO_LOG "PVS-Studio.log")
default(PVS_STUDIO_BIN "pvs-studio-analyzer")
default(PVS_STUDIO_CONVERTER "plog-converter")
default(PVS_STUDIO_PREPROCESSOR "${DEFAULT_PREPROCESSOR}")
default(PVS_STUDIO_PLATFORM "linux64")
if (PVS_STUDIO_EMPTY_CONFIG)
set(PVS_STUDIO_CONFIG_COMMAND echo "${PVS_STUDIO_CFG_TEXT}" > "${PVS_STUDIO_CONFIG}")
else ()
set(PVS_STUDIO_CONFIG_COMMAND touch "${PVS_STUDIO_CONFIG}")
endif ()
add_custom_command(OUTPUT "${PVS_STUDIO_CONFIG}"
COMMAND ${PVS_STUDIO_CONFIG_COMMAND}
WORKING_DIRECTORY "${BINARY_DIR}"
COMMENT "Generating PVS-Studio.cfg")
if (NOT "${PVS_STUDIO_PREPROCESSOR}" MATCHES "^${PVS_STUDIO_SUPPORTED_PREPROCESSORS}$")
message(FATAL_ERROR "Preprocessor ${PVS_STUDIO_PREPROCESSOR} isn't supported. Available options: ${PVS_STUDIO_SUPPORTED_PREPROCESSORS}.")
endif ()
pvs_studio_append_standard_flag(PVS_STUDIO_CXX_FLAGS "${CMAKE_CXX_STANDARD}")
pvs_studio_set_directory_flags("${CMAKE_CURRENT_SOURCE_DIR}" PVS_STUDIO_CXX_FLAGS PVS_STUDIO_C_FLAGS)
if (NOT "${PVS_STUDIO_LICENSE}" STREQUAL "")
pvs_studio_join_path(PVS_STUDIO_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}" "${PVS_STUDIO_LICENSE}")
list(APPEND PVS_STUDIO_ARGS --lic-file "${PVS_STUDIO_LICENSE}")
endif ()
list(APPEND PVS_STUDIO_ARGS --cfg "${PVS_STUDIO_CONFIG}"
--platform "${PVS_STUDIO_PLATFORM}"
--preprocessor "${PVS_STUDIO_PREPROCESSOR}")
set(PVS_STUDIO_PLOGS "")
foreach (TARGET ${PVS_STUDIO_ANALYZE})
set(DIR "${CMAKE_CURRENT_SOURCE_DIR}")
string(FIND "${TARGET}" ":" DELIM)
if ("${DELIM}" GREATER "-1")
math(EXPR DELIMI "${DELIM}+1")
string(SUBSTRING "${TARGET}" "${DELIMI}" "-1" DIR)
string(SUBSTRING "${TARGET}" "0" "${DELIM}" TARGET)
pvs_studio_join_path(DIR "${CMAKE_CURRENT_SOURCE_DIR}" "${DIR}")
endif ()
pvs_studio_analyze_target("${TARGET}" "${DIR}")
list(APPEND PVS_STUDIO_DEPENDS "${TARGET}")
endforeach ()
set(PVS_STUDIO_TARGET_CXX_FLAGS "")
set(PVS_STUDIO_TARGET_C_FLAGS "")
foreach (SOURCE ${PVS_STUDIO_SOURCES})
pvs_studio_analyze_file("${SOURCE}" "${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_CURRENT_BINARY_DIR}")
endforeach ()
pvs_studio_relative_path(LOG_RELATIVE "${CMAKE_BINARY_DIR}" "${PVS_STUDIO_LOG}")
if (PVS_STUDIO_PLOGS)
set(COMMANDS COMMAND cat ${PVS_STUDIO_PLOGS} > "${PVS_STUDIO_LOG}")
set(COMMENT "Generating ${LOG_RELATIVE}")
if (NOT "${PVS_STUDIO_FORMAT}" STREQUAL "" OR PVS_STUDIO_OUTPUT)
if ("${PVS_STUDIO_FORMAT}" STREQUAL "")
set(PVS_STUDIO_FORMAT "errorfile")
endif ()
list(APPEND COMMANDS
COMMAND mv "${PVS_STUDIO_LOG}" "${PVS_STUDIO_LOG}.pvs.raw"
COMMAND "${PVS_STUDIO_CONVERTER}" -t "${PVS_STUDIO_FORMAT}" "${PVS_STUDIO_LOG}.pvs.raw" -o "${PVS_STUDIO_LOG}"
COMMAND rm -f "${PVS_STUDIO_LOG}.pvs.raw")
endif ()
else ()
set(COMMANDS COMMAND touch "${PVS_STUDIO_LOG}")
set(COMMENT "Generating ${LOG_RELATIVE}: no sources found")
endif ()
add_custom_command(OUTPUT "${PVS_STUDIO_LOG}"
${COMMANDS}
COMMENT "${COMMENT}"
DEPENDS ${PVS_STUDIO_PLOGS}
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}")
if (PVS_STUDIO_ALL)
set(ALL "ALL")
else ()
set(ALL "")
endif ()
if (PVS_STUDIO_OUTPUT)
set(COMMANDS COMMAND cat "${PVS_STUDIO_LOG}" 1>&2)
else ()
set(COMMANDS "")
endif ()
add_custom_target("${PVS_STUDIO_TARGET}" ${ALL} ${COMMANDS} WORKING_DIRECTORY "${CMAKE_BINARY_DIR}" DEPENDS ${PVS_STUDIO_DEPENDS} "${PVS_STUDIO_LOG}")
endfunction ()
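#
# A minimal usage sketch (the target names, license path and log file below
# are hypothetical; see the parsed options above for the full list):
#
#   pvs_studio_add_target(TARGET pvs-check ALL OUTPUT FORMAT errorfile
#       ANALYZE mylib myapp
#       LICENSE "${CMAKE_SOURCE_DIR}/PVS-Studio.lic"
#       LOG "pvs-report.log")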

View File

@@ -0,0 +1,25 @@
# apply a patch to a directory, using a mark file so that it is applied only once
# to generate a patch:
# subversion: svn diff --patch-compatible > path/to/the/patch.diff
function(apply_patch patch where mark)
if(NOT EXISTS "${mark}")
if(NOT Patch_EXECUTABLE)
find_package(Patch REQUIRED)
endif()
file(TO_NATIVE_PATH ${patch} patch_native)
get_filename_component(patch_name "${patch}" NAME)
message(STATUS "Applying patch: ${patch_name}")
execute_process(
COMMAND "${Patch_EXECUTABLE}" "-p0" "--input=${patch_native}"
WORKING_DIRECTORY "${where}"
RESULT_VARIABLE status)
if(NOT status STREQUAL "0")
message(FATAL_ERROR "could not apply patch: ${patch} ---> ${where}")
else()
file(TOUCH "${mark}")
endif()
endif()
endfunction()
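#
# A minimal usage sketch (the paths are hypothetical); the mark file guards
# against applying the same patch twice:
#
#   apply_patch("${CMAKE_CURRENT_LIST_DIR}/fix-build.diff"
#       "${CMAKE_BINARY_DIR}/ext/somelib-src"
#       "${CMAKE_BINARY_DIR}/ext/somelib-src/.fix-build.applied")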

View File

@@ -0,0 +1,27 @@
function(status)
message(STATUS "${ARGV}")
endfunction()
function(print_var var)
message(STATUS "${var}=${${var}} ${ARGN}")
endfunction()
function(print_vars)
foreach(a ${ARGN})
message(STATUS "${a}=${${a}}")
endforeach(a)
endfunction()
function(debug_var debug var)
if(${debug})
message(STATUS "${var}=${${var}} ${ARGN}")
endif()
endfunction()
function(debug_vars debug)
if(${debug})
foreach(a ${ARGN})
message(STATUS "${a}=${${a}}")
endforeach(a)
endif()
endfunction()
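#
# Usage sketch (the variable names are just illustrative):
#
#   print_var(CMAKE_CXX_COMPILER)   # prints: CMAKE_CXX_COMPILER=/usr/bin/c++
#   print_vars(CMAKE_BUILD_TYPE CMAKE_GENERATOR)
#   debug_vars(MYPROJ_DEBUG_CMAKE CMAKE_CXX_FLAGS CMAKE_EXE_LINKER_FLAGS)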

View File

@@ -0,0 +1,25 @@
# cmake project utilities
Useful cmake scripts, at [c4Project.cmake](c4Project.cmake).
## Project utilities
## Adding targets
### Target types
## Downloading and configuring third-party projects at configure time
## Setting up tests
### Coverage
### Static analysis
### Valgrind
## Setting up benchmarks
## License
MIT License

View File

@@ -0,0 +1,176 @@
function(c4_get_architecture_defines output_var)
c4_get_target_cpu_architecture(arch)
if("${arch}" STREQUAL "x86_64")
set(defines __x86_64__)
elseif("${arch}" STREQUAL "i386")
set(defines __i386__)
elseif("${arch}" STREQUAL "armv8_64")
set(defines __arm__ __aarch64__)
elseif("${arch}" STREQUAL "armv8")
set(defines __arm__ __ARM_ARCH_8__)
elseif("${arch}" STREQUAL "armv7")
set(defines __arm__ __ARM_ARCH_7__)
elseif("${arch}" STREQUAL "armv6")
set(defines __arm__ __ARM_ARCH_6__)
elseif("${arch}" STREQUAL "armv5")
set(defines __arm__ __ARM_ARCH_5__)
elseif("${arch}" STREQUAL "armv4")
set(defines __arm__ __ARM_ARCH_4T__)
elseif("${arch}" STREQUAL "ia64")
set(defines __ia64__)
elseif("${arch}" STREQUAL "ppc64")
set(defines __ppc64__)
elseif("${arch}" STREQUAL "ia64")
set(defines __ia64__)
elseif("${arch}" STREQUAL "riscv64")
set(defines __riscv64__)
elseif("${arch}" STREQUAL "riscv32")
set(defines __riscv32__)
else()
message(FATAL_ERROR "unknown target architecture: ${arch}")
endif()
set(${output_var} ${defines} PARENT_SCOPE)
endfunction()
# adapted from https://github.com/axr/solar-cmake/blob/master/TargetArch.cmake
# Set ppc_support to TRUE before including this file or ppc and ppc64
# will be treated as invalid architectures since they are no longer supported by Apple
function(c4_get_target_cpu_architecture output_var)
# this should be more or less in line with c4core/cpu.hpp
set(archdetect_c_code "
#if defined(__x86_64) || defined(__x86_64__) || defined(__amd64) || defined(_M_X64)
#error cmake_ARCH x86_64
#elif defined(__i386) || defined(__i386__) || defined(_M_IX86)
#error cmake_ARCH i386
#elif defined(__arm__) || defined(_M_ARM) \
|| defined(__TARGET_ARCH_ARM) || defined(__aarch64__) || defined(_M_ARM64)
#if defined(__aarch64__) || defined(_M_ARM64)
#error cmake_ARCH armv8_64
#else
#if defined(__ARM_ARCH_8__) || (defined(__TARGET_ARCH_ARM) && __TARGET_ARCH_ARM >= 8)
#error cmake_ARCH armv8
#elif defined(__ARM_ARCH_7__) || defined(_ARM_ARCH_7) \
|| defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__) \
|| defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7S__) \
|| (defined(__TARGET_ARCH_ARM) && __TARGET_ARCH_ARM >= 7) \
|| (defined(_M_ARM) && _M_ARM >= 7)
#error cmake_ARCH armv7
#elif defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) \
|| defined(__ARM_ARCH_6T2__) || defined(__ARM_ARCH_6Z__) \
|| defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6ZK__) \
|| defined(__ARM_ARCH_6M__) \
|| (defined(__TARGET_ARCH_ARM) && __TARGET_ARCH_ARM >= 6)
#error cmake_ARCH armv6
#elif defined(__ARM_ARCH_5TEJ__) \
|| (defined(__TARGET_ARCH_ARM) && __TARGET_ARCH_ARM >= 5)
#error cmake_ARCH armv5
#elif defined(__ARM_ARCH_4T__) \
|| (defined(__TARGET_ARCH_ARM) && __TARGET_ARCH_ARM >= 4)
#error cmake_ARCH armv4
#else
#error cmake_ARCH arm
#endif
#endif
#elif defined(__ia64) || defined(__ia64__) || defined(_M_IA64)
#error cmake_ARCH ia64
#elif defined(__ppc__) || defined(__ppc) || defined(__powerpc__) \
|| defined(_ARCH_COM) || defined(_ARCH_PWR) || defined(_ARCH_PPC) \
|| defined(_M_MPPC) || defined(_M_PPC)
#if defined(__ppc64__) || defined(__powerpc64__) || defined(__64BIT__)
#error cmake_ARCH ppc64
#else
#error cmake_ARCH ppc32
#endif
#elif defined(__riscv)
#if __riscv_xlen == 64
#error cmake_ARCH riscv64
#else
#error cmake_ARCH riscv32
#endif
#endif
#error cmake_ARCH unknown
")
if(APPLE AND CMAKE_OSX_ARCHITECTURES)
# On OS X we use CMAKE_OSX_ARCHITECTURES *if* it was set
# First let's normalize the order of the values
# Note that it's not possible to compile PowerPC applications if you are using
# the OS X SDK version 10.6 or later - you'll need 10.4/10.5 for that, so we
# disable it by default
# See this page for more information:
# http://stackoverflow.com/questions/5333490/how-can-we-restore-ppc-ppc64-as-well-as-full-10-4-10-5-sdk-support-to-xcode-4
# Architecture defaults to i386 or ppc on OS X 10.5 and earlier, depending on the CPU type detected at runtime.
# On OS X 10.6+ the default is x86_64 if the CPU supports it, i386 otherwise.
foreach(osx_arch ${CMAKE_OSX_ARCHITECTURES})
if("${osx_arch}" STREQUAL "ppc" AND ppc_support)
set(osx_arch_ppc TRUE)
elseif("${osx_arch}" STREQUAL "i386")
set(osx_arch_i386 TRUE)
elseif("${osx_arch}" STREQUAL "x86_64")
set(osx_arch_x86_64 TRUE)
elseif("${osx_arch}" STREQUAL "ppc64" AND ppc_support)
set(osx_arch_ppc64 TRUE)
else()
message(FATAL_ERROR "Invalid OS X arch name: ${osx_arch}")
endif()
endforeach()
# Now add all the architectures in our normalized order
if(osx_arch_ppc)
list(APPEND ARCH ppc)
endif()
if(osx_arch_i386)
list(APPEND ARCH i386)
endif()
if(osx_arch_x86_64)
list(APPEND ARCH x86_64)
endif()
if(osx_arch_ppc64)
list(APPEND ARCH ppc64)
endif()
else()
file(WRITE "${CMAKE_BINARY_DIR}/detect_cpu_arch.c" "${archdetect_c_code}")
enable_language(C)
# Detect the architecture in a rather creative way...
# This compiles a small C program which is a series of ifdefs that selects a
# particular #error preprocessor directive whose message string contains the
# target architecture. The program will always fail to compile (both because the
# file is not a complete C program, and because of the presence of the #error
# preprocessor directives), but by exploiting the preprocessor in this way, we
# can detect the correct target architecture even when cross-compiling, since
# the program itself never needs to be run (only compiled/preprocessed).
try_run(
run_result_unused
compile_result_unused
"${CMAKE_BINARY_DIR}"
"${CMAKE_BINARY_DIR}/detect_cpu_arch.c"
COMPILE_OUTPUT_VARIABLE ARCH
CMAKE_FLAGS CMAKE_OSX_ARCHITECTURES=${CMAKE_OSX_ARCHITECTURES}
)
# Parse the architecture name from the compiler output
string(REGEX MATCH "cmake_ARCH ([a-zA-Z0-9_]+)" ARCH "${ARCH}")
# Get rid of the value marker leaving just the architecture name
string(REPLACE "cmake_ARCH " "" ARCH "${ARCH}")
# If we are compiling with an unknown architecture this variable should
# already be set to "unknown" but in the case that it's empty (i.e. due
# to a typo in the code), then set it to unknown
if (NOT ARCH)
set(ARCH unknown)
endif()
endif()
set(${output_var} "${ARCH}" PARENT_SCOPE)
endfunction()
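#
# A minimal sketch of how the two functions above fit together (the message
# and the consuming target are hypothetical):
#
#   c4_get_target_cpu_architecture(myproj_arch)
#   message(STATUS "compiling for ${myproj_arch}")
#   c4_get_architecture_defines(myproj_arch_defines)  # eg __x86_64__ on x86_64
#   # target_compile_definitions(myproj_preproc PRIVATE ${myproj_arch_defines})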

View File

@@ -0,0 +1,29 @@
SET(CMAKE_SYSTEM_NAME Linux)
SET(CMAKE_SYSTEM_PROCESSOR arm)
SET(CMAKE_SYSTEM_VERSION 1)
set(CMAKE_CROSSCOMPILING TRUE)
find_program(CC_GCC arm-linux-gnueabihf-gcc REQUIRED)
set(CMAKE_FIND_ROOT_PATH /usr/arm-gnueabihf)
# Cross compiler
SET(CMAKE_C_COMPILER arm-linux-gnueabihf-gcc)
SET(CMAKE_CXX_COMPILER arm-linux-gnueabihf-g++)
set(CMAKE_LIBRARY_ARCHITECTURE arm-linux-gnueabihf)
# Search for programs in the build host directories
SET(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
# Libraries and headers in the target directories
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
set(THREADS_PTHREAD_ARG "0" CACHE STRING "Result from TRY_RUN" FORCE)
get_filename_component(TOOLCHAIN_DIR "${CC_GCC}" DIRECTORY)
get_filename_component(TOOLCHAIN_DIR "${TOOLCHAIN_DIR}" DIRECTORY)
set(TOOLCHAIN_SO_DIR "${TOOLCHAIN_DIR}/arm-linux-gnueabihf/")
#/home/jpmag/local/arm/gcc-arm-9.2-2019.12-x86_64-arm-none-linux-gnueabihf
set(CMAKE_CROSSCOMPILING_EMULATOR qemu-arm -L ${TOOLCHAIN_SO_DIR})

View File

@@ -0,0 +1,84 @@
# taken from https://stackoverflow.com/a/49086560
# tested with the toolchain from ARM:
# gcc-arm-9.2-2019.12-mingw-w64-i686-arm-none-linux-gnueabihf.tar.xz
# found at
# https://developer.arm.com/tools-and-software/open-source-software/developer-tools/gnu-toolchain/gnu-a/downloads
# see also:
# https://stackoverflow.com/questions/42371788/how-to-run-helloworld-on-arm
# https://dev.to/younup/cmake-on-stm32-the-beginning-3766
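# A typical invocation with a toolchain file like this one would be something
# along the lines of (paths are hypothetical):
#   cmake -S . -B build-armhf -DCMAKE_TOOLCHAIN_FILE=/path/to/this/file.cmake
#   cmake --build build-armhf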
SET(CMAKE_SYSTEM_NAME Linux)
SET(CMAKE_SYSTEM_PROCESSOR arm)
SET(CMAKE_SYSTEM_VERSION 1)
set(CMAKE_CROSSCOMPILING TRUE)
find_program(CC_GCC arm-none-linux-gnueabihf-gcc REQUIRED)
set(CMAKE_FIND_ROOT_PATH /usr/arm-linux-gnueabihf)
# Cross compiler
SET(CMAKE_C_COMPILER arm-none-linux-gnueabihf-gcc)
SET(CMAKE_CXX_COMPILER arm-none-linux-gnueabihf-g++)
set(CMAKE_LIBRARY_ARCHITECTURE arm-none-linux-gnueabihf)
# Search for programs in the build host directories
SET(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
# Libraries and headers in the target directories
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
set(THREADS_PTHREAD_ARG "0" CACHE STRING "Result from TRY_RUN" FORCE)
get_filename_component(TOOLCHAIN_DIR "${CC_GCC}" DIRECTORY)
get_filename_component(TOOLCHAIN_DIR "${TOOLCHAIN_DIR}" DIRECTORY)
set(TOOLCHAIN_SO_DIR "${TOOLCHAIN_DIR}/arm-none-linux-gnueabihf/libc/")
#/home/jpmag/local/arm/gcc-arm-9.2-2019.12-x86_64-arm-none-linux-gnueabihf
set(CMAKE_CROSSCOMPILING_EMULATOR qemu-arm -L ${TOOLCHAIN_SO_DIR})
return()
set(CMAKE_SYSTEM_NAME Generic)
set(CMAKE_SYSTEM_PROCESSOR arm)
set(CMAKE_SYSTEM_VERSION 1)
set(CMAKE_CROSSCOMPILING 1)
set(CMAKE_C_COMPILER "arm-none-eabi-gcc")
set(CMAKE_CXX_COMPILER "arm-none-eabi-g++")
set(CMAKE_FIND_ROOT_PATH /usr/arm-none-eabi)
set(CMAKE_EXE_LINKER_FLAGS "--specs=nosys.specs" CACHE INTERNAL "")
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
set(COMPILER_FLAGS "-marm -mfpu=neon -mfloat-abi=hard -mcpu=cortex-a9 -D_GNU_SOURCE")
message(STATUS)
message(STATUS)
message(STATUS)
if(NOT DEFINED CMAKE_C_FLAGS)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMPILER_FLAGS}" CACHE STRING "")
else()
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMPILER_FLAGS}")
endif()
message(STATUS)
message(STATUS)
message(STATUS)
message(STATUS)
if(NOT DEFINED CMAKE_CXX_FLAGS)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS}" CACHE STRING "")
else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS}")
endif()

View File

@@ -0,0 +1,73 @@
# Copyright 2017 Autodesk Inc. http://www.autodesk.com
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
# This module is shared; use include blocker.
if( _PS4_TOOLCHAIN_ )
return()
endif()
set(_PS4_TOOLCHAIN_ 1)
# PS4 SCE version requirement
set(REQUIRED_PS4_VERSION "4.000")
# Get PS4 SCE environment
if( EXISTS "$ENV{SCE_ROOT_DIR}" AND IS_DIRECTORY "$ENV{SCE_ROOT_DIR}" )
string(REGEX REPLACE "\\\\" "/" PS4_ROOT $ENV{SCE_ROOT_DIR})
string(REGEX REPLACE "//" "/" PS4_ROOT ${PS4_ROOT})
if( EXISTS "$ENV{SCE_ORBIS_SDK_DIR}" AND IS_DIRECTORY "$ENV{SCE_ORBIS_SDK_DIR}" )
string(REGEX REPLACE "\\\\" "/" PS4_SDK $ENV{SCE_ORBIS_SDK_DIR})
string(REGEX REPLACE "//" "/" PS4_SDK ${PS4_SDK})
get_filename_component(SCE_VERSION "${PS4_SDK}" NAME)
endif()
endif()
# Report and check the version if it exists
if( NOT "${SCE_VERSION}" STREQUAL "" )
message(STATUS "PS4 SCE version found: ${SCE_VERSION}")
if( NOT "${SCE_VERSION}" MATCHES "${REQUIRED_PS4_VERSION}+" )
message(WARNING "Expected PS4 SCE version: ${REQUIRED_PS4_VERSION}")
if( PLATFORM_TOOLCHAIN_ENVIRONMENT_ONLY )
set(PS4_ROOT)
set(PS4_SDK)
endif()
endif()
endif()
# If we only want the environment values, exit now
if( PLATFORM_TOOLCHAIN_ENVIRONMENT_ONLY )
return()
endif()
# We are building PS4 platform, fail if PS4 SCE not found
if( NOT PS4_ROOT OR NOT PS4_SDK )
message(FATAL_ERROR "Engine requires PS4 SCE SDK to be installed in order to build PS4 platform.")
endif()
# Tell CMake we are cross-compiling to PS4 (Orbis)
set(CMAKE_SYSTEM_NAME Orbis)
set(PS4 True)
# Set CMake system root search path
set(CMAKE_SYSROOT "${PS4_ROOT}")
# Set compilers to the ones found in PS4 SCE SDK directory
set(CMAKE_C_COMPILER "${PS4_SDK}/host_tools/bin/orbis-clang.exe")
set(CMAKE_CXX_COMPILER "${PS4_SDK}/host_tools/bin/orbis-clang++.exe")
set(CMAKE_ASM_COMPILER "${PS4_SDK}/host_tools/bin/orbis-as.exe")
# Only search the PS4 SCE SDK, not the remainder of the host file system
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)

View File

@@ -0,0 +1,93 @@
# Copyright 2017 Autodesk Inc. http://www.autodesk.com
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
# This module is shared; use include blocker.
if( _XB1_TOOLCHAIN_ )
return()
endif()
set(_XB1_TOOLCHAIN_ 1)
# XB1 XDK version requirement
set(REQUIRED_XB1_TOOLCHAIN_VERSION "160305")
# Get XDK environment
if( EXISTS "$ENV{DurangoXDK}" AND IS_DIRECTORY "$ENV{DurangoXDK}" )
string(REGEX REPLACE "\\\\" "/" XDK_ROOT $ENV{DurangoXDK})
string(REGEX REPLACE "//" "/" XDK_ROOT ${XDK_ROOT})
endif()
# Fail if XDK not found
if( NOT XDK_ROOT )
if( PLATFORM_TOOLCHAIN_ENVIRONMENT_ONLY )
return()
endif()
message(FATAL_ERROR "Engine requires XB1 XDK to be installed in order to build XB1 platform.")
endif()
# Get toolchain version
get_filename_component(XDK_TOOLCHAIN_VERSION "[HKEY_LOCAL_MACHINE\\SOFTWARE\\Wow6432Node\\Microsoft\\Durango XDK\\${REQUIRED_XB1_TOOLCHAIN_VERSION};EditionVersion]" NAME)
if( XDK_TOOLCHAIN_VERSION STREQUAL REQUIRED_XB1_TOOLCHAIN_VERSION )
message(STATUS "Found required XDK toolchain version (${XDK_TOOLCHAIN_VERSION})")
else()
get_filename_component(XDK_TOOLCHAIN_VERSION "[HKEY_LOCAL_MACHINE\\SOFTWARE\\Wow6432Node\\Microsoft\\Durango XDK;Latest]" NAME)
message(WARNING "Could not find required XDK toolchain version (${REQUIRED_XB1_TOOLCHAIN_VERSION}), using latest version instead (${XDK_TOOLCHAIN_VERSION})")
endif()
# If we only want the environment values, exit now
if( PLATFORM_TOOLCHAIN_ENVIRONMENT_ONLY )
return()
endif()
# Find XDK compiler directory
if( CMAKE_GENERATOR STREQUAL "Visual Studio 11 2012" )
set(XDK_COMPILER_DIR "${XDK_ROOT}/${XDK_TOOLCHAIN_VERSION}/Compilers/dev11.1")
elseif( CMAKE_GENERATOR STREQUAL "Visual Studio 14 2015" )
get_filename_component(XDK_COMPILER_DIR "[HKEY_CURRENT_USER\\Software\\Microsoft\\VisualStudio\\14.0_Config\\Setup\\VC;ProductDir]" DIRECTORY)
if( DEFINED XDK_COMPILER_DIR )
string(REGEX REPLACE "\\\\" "/" XDK_COMPILER_DIR ${XDK_COMPILER_DIR})
string(REGEX REPLACE "//" "/" XDK_COMPILER_DIR ${XDK_COMPILER_DIR})
endif()
if( NOT XDK_COMPILER_DIR )
message(FATAL_ERROR "Can't find Visual Studio 2015 installation path.")
endif()
else()
message(FATAL_ERROR "Unsupported Visual Studio version!")
endif()
# Tell CMake we are cross-compiling to XBoxOne (Durango)
set(CMAKE_SYSTEM_NAME Durango)
set(XBOXONE True)
# Set CMake system root search path
set(CMAKE_SYSROOT "${XDK_COMPILER_DIR}")
# Set the compilers to the ones found in XboxOne XDK directory
set(CMAKE_C_COMPILER "${XDK_COMPILER_DIR}/vc/bin/amd64/cl.exe")
set(CMAKE_CXX_COMPILER "${XDK_COMPILER_DIR}/vc/bin/amd64/cl.exe")
set(CMAKE_ASM_COMPILER "${XDK_COMPILER_DIR}/vc/bin/amd64/ml64.exe")
# Force compilers to skip detecting compiler ABI info and compile features
set(CMAKE_C_COMPILER_FORCED True)
set(CMAKE_CXX_COMPILER_FORCED True)
set(CMAKE_ASM_COMPILER_FORCED True)
# Only search the XBoxOne XDK, not the remainder of the host file system
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
# Global variables
set(XBOXONE_SDK_REFERENCES "Xbox Services API, Version=8.0;Xbox GameChat API, Version=8.0")

View File

@@ -0,0 +1,216 @@
import re
import os
class cmtfile:
"""commented file"""
def __init__(self, filename):
self.filename = filename
def __str__(self):
return self.filename
class cmttext:
"""commented text"""
def __init__(self, text):
self.text = text
def __str__(self):
return self.text
class ignfile:
"""ignore file"""
def __init__(self, filename):
self.filename = filename
def __str__(self):
return self.filename
class hdrfile:
"""header file, with custom include guard"""
def __init__(self, filename, incpattern, include_guard=None):
self.filename = filename
self.incpattern = incpattern
self.include_guard = include_guard
def __str__(self):
return self.filename
class injfile:
"""header file, to be injected at the first include point"""
def __init__(self, filename, incpattern):
self.filename = filename
self.incpattern = incpattern
def __str__(self):
return self.filename
class injcode:
"""direct code to inject"""
def __init__(self, code):
self.code = code
def __str__(self):
return self.code
class onlyif:
def __init__(self, condition, obj):
self.condition = condition
self.obj = obj
def catfiles(filenames, rootdir,
include_regexes,
definition_macro,
repo,
result_incguard):
sepb = "//" + ("**" * 40)
sepf = "//" + ("--" * 40)
to_inject = {}
custom_include_guards = {}
def banner(s):
return f"\n\n\n{sepb}\n{sepf}\n// {s}\n// {repo}/{s}\n{sepf}\n{sepb}\n\n"
def footer(s):
return f"\n\n// (end {repo}/{s})\n"
def incguard(filename):
return custom_include_guards.get(filename,
f"{filename.replace('.','_').replace('/','_').upper()}_")
def replace_include(rx, match, line, guard):
line = line.rstrip()
incl = match.group(1)
if to_inject.get(incl) is None:
if guard is None:
guard = incguard(incl)
return f"""// amalgamate: removed include of
// {repo}/src/{incl}
//{line}
#if !defined({guard}) && !defined(_{guard})
#error "amalgamate: file {incl} must have been included at this point"
#endif /* {guard} */\n
"""
else:
entry = to_inject[incl]
del to_inject[incl]
return append_file(entry.filename)
def append_file(filename, guard=None):
s = ""
with open(filename) as f:
for line in f.readlines():
for rx in include_regexes:
match = rx.match(line)
if match:
line = replace_include(rx, match, line, guard)
s += line
return s
def append_cpp(filename):
return f"""#ifdef {definition_macro}
{append_file(filename)}
#endif /* {definition_macro} */
"""
def is_src(filename):
return filename.endswith(".cpp") or filename.endswith(".c")
def cmtline(line, more=""):
if len(line.strip()) > 0:
return f"// {line}{more}"
else:
return "//\n"
out = ""
for entry in filenames:
if isinstance(entry, onlyif):
if entry.condition:
entry = entry.obj
else:
continue
if isinstance(entry, ignfile):
pass
elif isinstance(entry, cmttext):
for line in entry.text.split("\n"):
out += cmtline(line, "\n")
elif isinstance(entry, cmtfile):
filename = f"{rootdir}/{entry.filename}"
out += banner(entry.filename)
with open(filename) as file:
for line in file.readlines():
out += cmtline(line)
elif isinstance(entry, injcode):
out += f"\n{entry.code}\n"
elif isinstance(entry, injfile):
entry.filename = f"{rootdir}/{entry.filename}"
to_inject[entry.incpattern] = entry
else:
filename = f"{rootdir}/{entry}"
out += banner(entry)
if isinstance(entry, hdrfile):
if entry.include_guard is not None:
custom_include_guards[entry.incpattern] = entry.include_guard
out += append_file(filename, entry.include_guard)
else:
assert isinstance(entry, str)
if is_src(filename):
out += append_cpp(filename)
else:
out += append_file(filename)
out += footer(entry)
return f"""#ifndef {result_incguard}
{out}
#endif /* {result_incguard} */
"""
def include_only_first(file_contents: str):
rx = [
re.compile(r'^\s*#\s*include "(.*?)".*'),
re.compile(r'^\s*#\s*include <(.*?)>.*'),
]
already_included = {}
out = ""
for line in file_contents.split("\n"):
for expr in rx:
match = expr.match(line)
if match:
incl = match.group(1)
if already_included.get(incl) is None:
already_included[incl] = line
if incl.endswith(".h"):
cpp_version = f"c{incl[:-2]}"
already_included[cpp_version] = line
elif incl.startswith("c") and not (incl.endswith(".h") or incl.endswith(".hpp")):
c_version = f"{incl[1:]}.h"
already_included[c_version] = line
else:
line = f"//included above:\n//{line}"
break
out += line
out += "\n"
return out
def mkparser(**bool_args):
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("output", default=None, nargs='?', help="output file. defaults to stdout")
for k, (default, help) in bool_args.items():
# https://stackoverflow.com/questions/15008758/parsing-boolean-values-with-argparse
feature = parser.add_mutually_exclusive_group(required=False)
yes = '--' + k
no = '--no-' + k
if default:
yes_default = "this is the default"
no_default = f"the default is {yes}"
else:
yes_default = f"the default is {no}"
no_default = "this is the default"
feature.add_argument(yes, dest=k, action='store_true', help=f"{help}. {yes_default}.")
feature.add_argument(no, dest=k, action='store_false', help=f"{help}. {no_default}.")
parser.set_defaults(**{k: default})
return parser
def file_put_contents(filename: str, contents: str):
if filename is None:
print(contents)
else:
dirname = os.path.dirname(filename)
if dirname:
os.makedirs(dirname, exist_ok=True)
with open(filename, "w") as output:
output.write(contents)

View File

@@ -0,0 +1 @@
static/*

View File

@@ -0,0 +1,7 @@
# Benchmark explorer
You need to start an HTTP server serving this folder:
```shellsession
$ python bm.py serve .
```

View File

@@ -0,0 +1,475 @@
/* https://stackoverflow.com/questions/9050345/selecting-last-element-in-javascript-array */
function last(arr)
{
return arr[arr.length - 1];
};
function dbg()
{
/* pass ?dbg=1 to enable debug logs */
/*if(!getParam('dbg', 0)){
return;
}*/
elm = $("#dbg");
var s = "";
for (var i = 0; i < arguments.length; i++) {
if(i > 0) s += ' ';
s += arguments[i].toString();
}
console.log(s);
s+= "\n";
elm.append(document.createTextNode(s));
}
function iterArr(arr, fn) {
for (var key in arr) {
if (arr.hasOwnProperty(key)) {
fn(key, arr[key]);
}
}
}
function fileContents(file, onComplete)
{
dbg(`${file}: requesting...`);
var data;
$.get(file, function(d) {
dbg(`${file}: got response! ${d.length}B...`);
if(onComplete) {
onComplete(d);
}
}, "text");
}
/* https://stackoverflow.com/questions/7394748/whats-the-right-way-to-decode-a-string-that-has-special-html-entities-in-it/7394787 */
function decodeHtmlEntities(str)
{
return str
.replace("&amp;", "&")
.replace("&lt;", "<")
.replace("&gt;", ">")
.replace("&quot;", "\"")
.replace(/&#(\d+);/g, function(match, dec) {
return String.fromCharCode(dec);
});
}
/* https://stackoverflow.com/questions/6234773/can-i-escape-html-special-chars-in-javascript */
function escapeHtml(unsafe)
{
return unsafe
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;")
.replace(/'/g, "&#039;");
}
/* URL params ----------------------------------------------------------------- */
var _curr_url_params = null;
function parseUrlParams()
{
var keyvals = [];
var keys = document.location.search.substring(1).split('&');
dbg("keys=", keys)
for(var i = 0; i < keys.length; i++) {
var key = keys[i].split('=');
dbg("i=", i, " key=", key);
keyvals.push(key[0]);
keyvals[key[0]] = key[1];
}
_curr_url_params = keyvals;
}
function dbgParams() {
iterArr(_curr_url_params, function(key, val){ dbg("url params:", key, "=", val); })
}
function getParam(name, fallback)
{
if(_curr_url_params === null) { parseUrlParams(); }
if(name in _curr_url_params) {
return _curr_url_params[name];
}
return fallback;
}
function setParam(name, value) {
if(_curr_url_params === null) { parseUrlParams(); }
_curr_url_params[name] = value;
// https://stackoverflow.com/questions/486896/adding-a-parameter-to-the-url-with-javascript
document.location.search = joinParams();
}
function joinParams() {
if(_curr_url_params === null) { parseUrlParams(); }
var s = "";
iterArr(_curr_url_params, function(key, val){
if(s != ""){ s += '&'; }
s += `${key}=${val}`;
});
return s;
}
/* ----------------------------------------------------------------------------- */
function colMax(data, col)
{
var max = -1.e30;
data.forEach(function(item, index){
max = item[col] > max ? item[col] : max;
});
return max;
}
function colMin(data, col)
{
var min = 1.e30;
data.forEach(function(item, index){
min = item[col] < min ? item[col] : min;
});
return min;
}
/* https://stackoverflow.com/questions/2283566/how-can-i-round-a-number-in-javascript-tofixed-returns-a-string */
function toFixedNumber(num, digits, base)
{
var pow = Math.pow(base||10, digits);
return Math.round(num*pow) / pow;
}
function humanReadable(sz, base=1024, precision=3)
{
var i = -1;
var units;
if(base == 1000)
{
units = ['k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'];
}
else if(base == 1024)
{
units = ['ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'];
}
do
{
sz /= base;
i++;
} while (sz > base);
return sz.toFixed(precision) + units[i];
};
/* ----------------------------------------------------------------------------- */
class BmResults
{
constructor(dict={})
{
Object.assign(this, dict);
for(var i = 0; i < this.benchmarks.length; ++i) {
var bm = this.benchmarks[i];
bm.name = decodeHtmlEntities(bm.name);
bm.run_name = decodeHtmlEntities(bm.run_name);
}
}
}
var bmSpecs;
function iterBms(fn)
{
iterArr(bmSpecs.bm, fn);
}
function loadSpecs(specs)
{
dbg("loading specs ....");
iterArr(specs, function(k, v){dbg("k=", k, 'v=', v); });
$("#heading-title").html(`Benchmarks: <a href="${specs.url}">${specs.projname}</a>`);
bmSpecs = specs;
var toc = $("#toc");
/*toc.append(`<li><a href="#" onclick="setParam('bm', 'all');">Load all</a></li>`);*/
iterBms(function(key, bm) {
toc.append(`<li><a href="#${key}" onclick="setParam('bm', '${key}');">${key}</a>: ${bm.specs.desc}</li>`)
bm.name = key;
});
// load if required
currBm = getParam("bm", "");
dbg("params=", _curr_url_params, currBm);
if(currBm != "") {
dbg("loading BM from URL:", currBm)
loadBm(currBm);
}
}
function normalizeBy(results, column_name, best_fn)
{
var best = best_fn(results.benchmarks, column_name);
results.benchmarks.forEach(function(item, index){
item[`${column_name}_normalized`] = item[column_name] / best;
});
}
function loadAll()
{
var id = "#bm-results";
$(id).empty();
var i = 0;
iterBms(function(key, bm){
if(i++ > 0) $(id).append("<div class='bm-sep'><hr/></div>");
appendBm(key);
});
}
function loadBm(key)
{
dbg("loading-.....", key);
/*if(key == "all") {
loadAll();
}*/
$("#bm-results").empty();
var bm = bmSpecs.bm[key];
if(bm.src != "") {
fileContents(bm.src, function(data){
dbg(`${key}: got src data!`)
bm.src_data = data;
});
}
var latestRun = last(bm.entries);
var bmfile = `${latestRun}/${key}.json`;
dbg("bmfile=", bmfile);
fileContents("bm/"+bmfile, function(data){
dbg(`${key}: got bm data!`)
bm.results_data = new BmResults(JSON.parse(data));
bm.results_data.benchmarks.forEach(function(item, index){
item.id = index;
});
normalizeBy(bm.results_data, 'iterations', colMin);
normalizeBy(bm.results_data, 'real_time', colMin);
normalizeBy(bm.results_data, 'cpu_time', colMin);
normalizeBy(bm.results_data, 'bytes_per_second', colMin);
normalizeBy(bm.results_data, 'items_per_second', colMin);
appendBm(latestRun, key, bm);
});
}
function appendBm(run_id, id, bm)
{
if($(document).find(`bm-results-${id}`).length == 0)
{
$("#bm-results").append(`
<div id="bm-results-${id}">
<h2 id="bm-title-${id}">${id}</h2>
<h3 id="heading-details-table-${id}">Run details</h3><table id="table-details-${id}" class="datatable" width="800px"></table>
<h3 id="heading-table-${id}">Result tables</h3>
<h4 id="heading-table-${id}_pretty">Results</h4><table id="table-${id}_pretty" class="datatable" width="800px"></table>
<h4 id="heading-table-${id}_normalized">Normalized by column min</h4><table id="table-${id}_normalized" class="datatable" width="800px"></table>
<h3 id="heading-chart-${id}">Chart</h2>
<div id="chart-container-${id}"></div>
<h3 id="heading-code-${id}">Code</h2>
<pre><code id="code-${id}" class="lang-c++"></code></pre>
</div>
`);
}
var results = bm.results_data;
var code = bm.src_data;
loadDetailsTable(run_id, id, bm, results);
loadTable(id, bm, results);
loadChart(id, bm, results);
loadCode(id, bm, code);
}
function loadCode(elmId, bm, code)
{
var elm = $(`#code-${elmId}`);
elm.text(code);
/* hljs.highlightBlock(elm); // this doesn't work */
/* ... and this is very inefficient: */
document.querySelectorAll('pre code').forEach((block) => {
hljs.highlightBlock(block);
});
}
function parseRunId(run_id)
{
// example:
// commit id / cpu id - system id - build id
// git20201204_202919-b3f7fa7/x86_64_b9db3176-linux_4e9326b4-64bit_Debug_gcc10.2.0_10c5d03c
// git20201203_193348-2974fb0/x86_64_16ac0500-win32_59f3579c-64bit_MinSizeRel_msvc19.28.29304.1_32f6fc66
// to tune the regex: https://regex101.com/r/rdkPi8/1
// commit / cpu - system - build
var rx = /^(.+?)-([0-9a-f]{7})\/(.+?)_([0-9a-f]{8})-(.+?)_([0-9a-f]{8})-(.+?)_([0-9a-f]{8})$/gim;
var tag = rx.exec(run_id);
dbg("fdx: run_id=", run_id);
dbg("fdx: tag=", tag);
dbg("fdx: len=", tag.length);
return {
commit_id: `${tag[2]}: ${tag[1]}`,
cpu_id: `${tag[4]}: ${tag[3]} `,
system_id: `${tag[6]}: ${tag[5]}`,
build_id: `${tag[8]}: ${tag[7]}`,
};
}
function getBuildId(run_id)
{
return parseRunId(run_id).build_id;
}
function loadDetailsTable(run_id, id, bm, results)
{
var url = bmSpecs.url;
var run = bmSpecs.runs[run_id];
var commit = bmSpecs.commit[run.commit].specs;
var cpu = bmSpecs.cpu[run.cpu].specs;
var system = bmSpecs.system[run.system].specs;
let other_commit_entries = bmSpecs.commit[run.commit].entries.filter(
entry_run => entry_run != run_id
).map(entry_run => getBuildId(entry_run)).join('<br>');
/* https://datatables.net/ */
$(`#table-details-${id}`).DataTable({
info: false,
paging: false,
searching: false,
retrieve: false,
order: [],
columns: [
{title: "", data: "desc"},
{title: "", data: "contents"},
],
data: [
{desc: "benchmark id" , contents: id},
{desc: "commit" , contents: ahref(`${url}/commit/${commit.sha1}`, commit.sha1)},
{desc: "commit date" , contents: ahref(`${url}/commit/${commit.sha1}`, commit.committed_datetime)},
{desc: "commit summary", contents: ahref(`${url}/commit/${commit.sha1}`, commit.summary)},
{desc: "source tree" , contents: ahref(`${url}/tree/${commit.sha1}`, `tree @ ${commit.sha1}`)},
{desc: "benchmark" , contents: ahref(`${url}/tree/${commit.sha1}/${bm.specs.src}`, `source @ ${commit.sha1}`)},
{desc: "cpu used" , contents: `${cpu.arch} ${cpu.brand_raw}`},
{desc: "system used" , contents: `${system.uname.system} ${system.uname.release}`},
{desc: "this build" , contents: `<pre>${getBuildId(run_id)}</pre>`},
{desc: "commit builds" , contents: `<pre>${other_commit_entries}</pre>`},
]
});
function ahref(url, txt) { return `<a href="${url}" target="_blank">${txt}</a>`; }
}
function loadTable(id, bm, results)
{
function render_int(data, type, row, meta) { return toFixedNumber(data, 0); }
function render_megas(data, type, row, meta) { return toFixedNumber(data / 1.e6, 3); }
function render_fixed(data, type, row, meta) { return toFixedNumber(data, 3); }
function render_human(data, type, row, meta) { return humanReadable(data, 1000, 3); }
addTable("_pretty" , "" , {ns: render_int, iters: render_megas, rates: render_megas});
addTable("_normalized", "_normalized", {ns: render_fixed, iters: render_fixed, rates: render_fixed});
function addTable(suffix, data_suffix, renderers) {
/* https://datatables.net/ */
var searching = (results.benchmarks.length > 20);
var ratePrefix = renderers.rates == render_megas ? "M" : "";
var iterPrefix = renderers.iters == render_megas ? "M" : "";
var clockSuffix = data_suffix == "_normalized" ? "" : "(ns)";
$(`#table-${id}${suffix}`).DataTable( {
data: results.benchmarks,
info: false,
paging: false,
searching: searching,
retrieve: searching,
/* https://datatables.net/reference/option/columns.type */
columns: [
{title: "ID", data: "id", type: "num"},
{title: "Name", data: "name", render: function(data, type, row, meta) { return escapeHtml(data); }},
{title: `${ratePrefix}B/s` , data: `bytes_per_second${data_suffix}`, type: "num", className: "text-right", render: renderers.rates},
{title: `${ratePrefix}items/s` , data: `items_per_second${data_suffix}`, type: "num", className: "text-right", render: renderers.rates},
{title: `Clock${clockSuffix}` , data: `real_time${data_suffix}` , type: "num", className: "text-right", render: renderers.ns},
{title: `CPU${clockSuffix}` , data: `cpu_time${data_suffix}` , type: "num", className: "text-right", render: renderers.ns},
{title: `${iterPrefix}Iterations`, data: `iterations${data_suffix}` , type: "num", className: "text-right", render: renderers.iters},
]});
}
}
function loadChart(id, bm, results)
{
addChartFromColumn('bytes_per_second_normalized', "B/s", "(more is better)");
addChartFromColumn('items_per_second_normalized', "items/s", "(more is better)");
addChartFromColumn('iterations_normalized', "Iterations", "(more is better)");
addChartFromColumn('real_time_normalized', "Clock time", "(less is better)");
addChartFromColumn('cpu_time_normalized', "CPU time", "(less is better)");
function addChartFromColumn(column, column_name, obs) {
var elmId = `chart-${id}-${column}`;
var canvas = `${elmId}-canvas`;
$(`#chart-container-${id}`).append(`
<div id="${elmId}" class="chart">
<canvas id="${canvas}"></canvas>
</div>
`);
var chart = new CanvasJS.Chart(elmId, {
animationEnabled: false,
title:{
fontSize: 24,
/* text: `${id}: ${column_name}\n${obs}` */
text: `${column_name}\n${obs}`
},
axisX: {
labelFontSize: 12,
},
data: [{
type: "bar",
axisYType: "secondary",
color: "#eb7434",/*"#014D65",*/
dataPoints: results.benchmarks.map(function(item){
return {
indexLabelFormatter: function(e) { return e.dataPoint.indexLabel; },
indexLabelFontSize: 16,
indexLabel: item.name,
/* label: item.name, */
y: item[column],
/* save the result here: the tooltip will show the full thing */
benchmark_results: item
};
}),
}],
toolTip: {
/*content: "{indexLabel}: {y}",*/
contentFormatter: function(e){
function hr(val) { return humanReadable(val, 1000, 3); }
function fx(val) { return toFixedNumber(val, 3); }
function fxi(val) { return toFixedNumber(val, 0); }
function getRow(name, abs, rel) { return `<tr><td>${name}</td><td>${abs}</td><td>${rel}x min</td></tr>`; }
var r = e.entries[0].dataPoint.benchmark_results;
var hdrRow = `<tr><th></th><th>Absolute</th><th>Normalized</th></tr>`;
var bpsRow = getRow("B/s", hr(r.bytes_per_second), fx(r.bytes_per_second_normalized));
var ipsRow = getRow("items/s", hr(r.items_per_second), fx(r.items_per_second_normalized));
var cpuRow = getRow("CPU", fxi(r.cpu_time) + "ns", fx(r.cpu_time_normalized));
var clockRow = getRow("Clock", fxi(r.real_time) + "ns", fx(r.real_time_normalized));
var itersRow = getRow("Iterations", hr(r.iterations), fx(r.iterations_normalized));
var table = `<table>${hdrRow}${bpsRow}${ipsRow}${cpuRow}${clockRow}${itersRow}</table>`;
return `<h4>${escapeHtml(r.name)}</h4>${table}`;
}
}
});
chart.render();
}
}


@@ -0,0 +1,568 @@
import os
import sys
import argparse
import requests
import flask
import json
import re
import yaml
import shutil
import mmh3
from munch import Munch, munchify
from flask import render_template, redirect, url_for, send_from_directory
from markupsafe import escape
def log(*args, **kwargs):
print(*args, **kwargs, flush=True)
def myhash_combine(curr, value):
# mix in the style of boost::hash_combine; 0x9e3779b9 is the 32-bit golden-ratio constant
return curr ^ (value + 0x9e3779b9 + (curr<<6) + (curr>>2))
def optionals(obj, *attrs):
ret = []
for attr in attrs:
if not hasattr(obj, attr):
log("attr not present:", attr)
continue
ret.append(getattr(obj, attr))
return ret
def myhash(*args):
h = 137597
for a in args:
if isinstance(a, str):
if a == "":
continue
b = bytes(a, "utf8")
else:
b = bytes(a)
hb = mmh3.hash(b, signed=False)
h = myhash_combine(h, hb)
s = hex(h)
return s[2:min(10, len(s))]
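# For illustration: a call such as myhash("x86_64", "linux", "gcc10.2.0") yields a short
# hex digest (at most 8 hex chars); the concrete value depends on mmh3's output for these inputs.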
def copy_file_to_dir(file, dir):
dir = os.path.abspath(dir)
src = os.path.abspath(file)
dst = f"{dir}/{os.path.basename(src)}"
if not os.path.exists(dir):
os.makedirs(dir)
if os.path.exists(dst):
os.remove(dst)
log("copy:", src, "-->", dst)
shutil.copy(src, dst)
return dst
def chk(f):
log(f"looking for file:", f)
assert os.path.exists(f), f
return f
def load_yml_file(filename):
if not os.path.exists(filename):
raise Exception(f"not found: {filename}")
with open(filename) as f:
return load_yml(f.read())
def dump_yml(data, filename):
with open(filename, "w") as f:
yaml.safe_dump(data, f)
def load_yml(yml):
return munchify(yaml.safe_load(yml))
def dump_json(data, filename):
with open(filename, "w") as f:
f.write(json.dumps(data, indent=2, sort_keys=True))
def main():
#
parser = argparse.ArgumentParser(description="Browse benchmark results", prog="bm")
parser.add_argument("--debug", action="store_true", help="enable debug mode")
subparsers = parser.add_subparsers()
#
sp = subparsers.add_parser("create", help="create benchmark collection")
sp.set_defaults(func=BenchmarkCollection.create_new)
sp.add_argument("--debug", action="store_true", help="enable debug mode")
sp.add_argument("filename", type=str, help="the YAML file with the benchmark specs")
sp.add_argument("target", type=str, help="the directory to store the results")
#
sp = subparsers.add_parser("meta", help="get the required meta-information: cpu info, commit data")
sp.set_defaults(func=add_meta)
sp.add_argument("--debug", action="store_true", help="enable debug mode")
sp.add_argument("results", type=str, help="the directory with the results")
sp.add_argument("cmakecache", type=str, help="the path to the CMakeCache.txt file used to build the benchmark binaries")
sp.add_argument("build_type", type=str, help="the build type, eg Release Debug MinSizeRel RelWithDebInfo")
#
sp = subparsers.add_parser("add", help="add benchmark results")
sp.set_defaults(func=add_results)
sp.add_argument("--debug", action="store_true", help="enable debug mode")
sp.add_argument("results", type=str, help="the directory with the results")
sp.add_argument("target", type=str, help="the directory to store the results")
#
sp = subparsers.add_parser("serve", help="serve benchmark results")
sp.set_defaults(func=serve)
sp.add_argument("--debug", action="store_true", help="enable debug mode")
sp.add_argument("bmdir", type=os.path.abspath, default=os.getcwd(), help="the directory with the results. default=.")
sp.add_argument("-H", "--host", type=str, default="localhost", help="host. default=%(default)s")
sp.add_argument("-p", "--port", type=int, default=8000, help="port. default=%(default)s")
#
sp = subparsers.add_parser("export", help="export static html")
sp.set_defaults(func=freeze)
sp.add_argument("--debug", action="store_true", help="enable debug mode")
sp.add_argument("bmdir", type=os.path.abspath, default=os.getcwd(), help="the directory with the results. default=.")
#
sp = subparsers.add_parser("deps", help="install server dependencies")
sp.set_defaults(func=lambda _: download_deps())
sp.add_argument("--debug", action="store_true", help="enable debug mode")
#
args = parser.parse_args(sys.argv[1:] if len(sys.argv) > 1 else ["serve"])
if args.debug:
log(args)
args.func(args)
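# Typical workflow, as a sketch (hypothetical paths and script name):
#   python bm.py create bm.yml /path/to/collection            # start a new collection
#   python bm.py meta results/ build/CMakeCache.txt Release   # attach commit/cpu/system/build metadata
#   python bm.py add results/ /path/to/collection             # import a run into the collection
#   python bm.py serve /path/to/collection -p 8000            # browse the results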
def get_manifest(args):
bmdir = os.path.abspath(args.bmdir)
manif_yml = os.path.join(bmdir, "manifest.yml")
manif_json = os.path.join(bmdir, "manifest.json")
manif = load_yml_file(manif_yml)
dump_json(manif, manif_json)
return manif
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
app = flask.Flask(__name__,
template_folder='template')
def _setup_app(args):
def _s(prop, val):
assert not hasattr(app, prop), prop
setattr(app, prop, val)
_s('args', args)
_s('manifest', get_manifest(args))
if args.debug:
app.config["DEBUG"] = True
def freeze(args):
"https://pythonhosted.org/Frozen-Flask/"
from flask_frozen import Freezer
_setup_app(args)
freezer = Freezer(app)
freezer.freeze(debug=args.debug)
def serve(args):
_setup_app(args)
app.run(host=args.host, port=args.port, debug=args.debug)
@app.route("/")
def home():
log("requested home")
return render_template("index.html")
@app.route("/<path>")
def other_(path):
path = escape(path)
d = app.args.bmdir
log("requested other path:", path, "---", os.path.join(d, path))
return send_from_directory(d, path)
@app.route("/static/<path>")
def static_(path):
path = escape(path)
d = os.path.join(app.args.bmdir, "static")
log("requested static path:", path, "---", os.path.join(d, path))
return send_from_directory(d, path, cache_timeout=1) # timeout in seconds
@app.route("/bm/<commit>/<run>/<resultjson>")
def bm_(commit, run, resultjson):
commit = escape(commit)
run = escape(run)
resultjson = escape(resultjson)
d = os.path.join(app.args.bmdir, "runs", commit, run)
log("requested result:", os.path.join(d, resultjson))
return send_from_directory(d, resultjson, cache_timeout=1) # timeout in seconds
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
def download_deps():
deps = [
"https://code.jquery.com/jquery-3.3.1.js",
"https://code.jquery.com/jquery-3.3.1.js",
"https://code.jquery.com/ui/1.12.1/jquery-ui.js",
"https://cdn.datatables.net/1.10.20/js/jquery.dataTables.js",
"https://cdn.datatables.net/1.10.20/js/jquery.dataTables.min.js",
"https://cdn.datatables.net/1.10.20/css/jquery.dataTables.css",
"https://cdn.datatables.net/1.10.20/css/jquery.dataTables.min.css",
"https://www.chartjs.org/dist/2.9.1/Chart.min.js",
#("https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.3.2/styles/github.css", "highlight.github.css"),
("https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.3.2/styles/github.min.css", "highlight.github.min.css"),
#"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.3.2/highlight.js",
"https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.3.2/highlight.min.js",
]
for src in deps:
if isinstance(src, str):
base = os.path.basename(src)
else:
src, base = src
dst = f"{os.getcwd()}/static/{base}"
download_url(src, dst)
def download_url(url, dst):
log("download url:", url, "--->", dst)
req = requests.get(url, stream=True)
if req.status_code == 200:
sz = 0
with open(dst, 'wb') as f:
for chunk in req:
f.write(chunk)
sz += len(chunk)
log(f"........ finished: {sz}B")
else:
log(f" error:", req.status_code, url)
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
class BenchmarkCollection:
@staticmethod
def create_new(args):
dir = args.target
filename = os.path.join(dir, "bm.yml")
manifest = os.path.join(dir, "manifest.yml")
if not os.path.exists(dir):
os.makedirs(dir)
shutil.copyfile(args.filename, filename)
dump_yml(load_yml("""{runs: {}, bm: {}}"""), manifest)
return __class__(dir)
def __init__(self, dir):
if not os.path.exists(dir):
raise Exception(f"not found: {dir}")
self.dir = os.path.abspath(dir)
self.runs_dir = os.path.join(self.dir, "runs")
self.manifest = os.path.join(self.dir, "manifest.yml")
self.filename = os.path.join(self.dir, "bm.yml")
self.specs = munchify(load_yml_file(self.filename))
self.manif = munchify(load_yml_file(self.manifest))
def add(self, results_dir):
results_dir = os.path.abspath(results_dir)
dst_dir, meta = self._read_run(results_dir)
self._add_run(results_dir, dst_dir, meta)
dump_yml(self.manif, self.manifest)
def _read_run(self, results_dir):
log("adding run...")
id = f"{len(self.manif.runs.keys()):05d}"
log(f"adding run: id={id}")
meta = ResultMeta.load(results_dir)
dst_dir = os.path.join(self.runs_dir, meta.name)
return dst_dir, meta
def _add_run(self, results_dir, dst_dir, meta):
cats = self._add_meta_categories(meta)
for filename in ("meta.yml",
"CMakeCCompiler.cmake",
"CMakeCXXCompiler.cmake",
"CMakeSystem.cmake",
"compile_commands.json"):
filename = os.path.join(results_dir, filename)
if os.path.exists(filename):
copy_file_to_dir(filename, dst_dir)
else:
if not filename.endswith("compile_commands.json"):
raise Exception(f"required file missing from results dir: {filename}")
for name, specs in self.specs.bm.items():
if not hasattr(specs, 'variants'):
filename = chk(f"{results_dir}/{name}.json")
dst = copy_file_to_dir(filename, dst_dir)
self._add_bm_run(name, specs, meta)
else:
for t in specs.variants:
tname = f"{name}-{t}"
filename = chk(f"{results_dir}/{tname}.json")
dst = copy_file_to_dir(filename, dst_dir)
self._add_bm_run(tname, specs, meta)
def _add_bm_run(self, name, specs, meta):
if name not in self.manif.bm.keys():
self.manif.bm[name] = Munch(specs=specs, entries=[])
entry = self.manif.bm[name]
entry.specs = specs
if meta.name not in entry.entries:
entry.entries.append(meta.name)
def _add_meta_categories(self, meta):
run = Munch()
for catname in ('commit', 'cpu', 'system', 'build'):
meta_item = getattr(meta, catname)
self._add_item_to_category(meta.name, catname, meta_item)
run[catname] = meta_item.storage_id
# build specs are too verbose; remove them
self.manif.build[meta.build.storage_id].specs = Munch()
self.manif.runs[meta.name] = run
def _add_item_to_category(self, run, category_name, item):
if not hasattr(self.manif, category_name):
setattr(self.manif, category_name, Munch())
category = getattr(self.manif, category_name)
if item.storage_id not in category.keys():
category[item.storage_id] = Munch(specs=item, entries=[])
entry = category[item.storage_id]
entry.specs = item
if run not in entry.entries:
entry.entries.append(run)
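# Sketch of the resulting manifest.yml layout (illustrative, inferred from the code above):
#   runs:   {<run_name>: {commit: <id>, cpu: <id>, system: <id>, build: <id>}}
#   bm:     {<bm_name>:  {specs: {...}, entries: [<run_name>, ...]}}
#   commit: {<id>:       {specs: {...}, entries: [<run_name>, ...]}}
#   cpu, system, build: same shape as commit (the build specs are kept empty)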
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
class ResultMeta(Munch):
def __init__(self, results_dir, cmakecache, build_type):
super().__init__(self)
self.date = __class__.get_date()
self.commit = __class__.get_commit(results_dir)
self.cpu = __class__.get_cpu_info()
self.system = __class__.get_sys_info()
self.build = __class__.get_build_info(cmakecache, build_type)
self.name = self._get_name()
@staticmethod
def load(results_dir):
results_dir = os.path.join(os.path.abspath(results_dir), "meta.yml")
data = load_yml_file(results_dir)
return munchify(data)
def save(self, results_dir):
out = os.path.join(results_dir, "meta.yml")
log("saving meta:", out)
dump_yml(self, out)
self.build.save(results_dir)
@staticmethod
def get_date():
import datetime
now = datetime.datetime.now()
return now.strftime("%Y%m%d-%H%M%S")
def _get_name(self):
commit = self.commit.storage_name
cpu = self.cpu.storage_name
sys = self.system.storage_name
build = self.build.storage_name
name = f"{commit}/{cpu}-{sys}-{build}"
return name
@staticmethod
def get_commit(results_dir):
import git
repo = git.Repo(results_dir, search_parent_directories=True)
commit = repo.head.commit
commit = {p: str(getattr(commit, p))
for p in ('message', 'summary', 'name_rev',
'author',
'authored_datetime',
'committer',
'committed_datetime',)}
commit = Munch(commit)
commit.message = commit.message.strip()
commit.sha1 = commit.name_rev[:7]
spl = commit.authored_datetime.split(" ")
date = re.sub(r'-', '', spl[0])
time = re.sub(r'(\d+):(\d+):(\d+).*', r'\1\2\3', spl[1])
commit.storage_id = commit.sha1
commit.storage_name = f"git{date}_{time}-{commit.sha1}"
return commit
@staticmethod
def get_cpu_info():
import cpuinfo
nfo = cpuinfo.get_cpu_info()
nfo = Munch(nfo)
for a in ('cpu_version', 'cpu_version_string', 'python_version'):
if hasattr(nfo, a):
delattr(nfo, a)
for a in ('arch_string_raw', 'brand_raw', 'hardware_raw', 'vendor_id_raw'):
if not hasattr(nfo, a):
setattr(nfo, a, '')
nfo.storage_id = myhash(
nfo.arch_string_raw, nfo.brand_raw, nfo.hardware_raw, nfo.vendor_id_raw,
nfo.arch, nfo.bits, nfo.count, nfo.family, nfo.model, nfo.stepping,
",".join(nfo.flags), nfo.hz_advertised_friendly,
nfo.l2_cache_associativity,
nfo.l2_cache_line_size,
nfo.l2_cache_size,
nfo.l3_cache_size,
*optionals(nfo, 'l1_data_cache_size', 'l1_instruction_cache_size')
)
nfo.storage_name = f"{nfo.arch.lower()}_{nfo.storage_id}"
return nfo
@staticmethod
def get_sys_info():
import platform
uname = platform.uname()
nfo = Munch(
sys_platform=sys.platform,
sys=platform.system(),
uname=Munch(
machine=uname.machine,
node=uname.node,
release=uname.release,
system=uname.system,
version=uname.version,
)
)
nfo.storage_id = myhash(
nfo.sys_platform,
nfo.uname.machine,
)
nfo.storage_name = f"{nfo.sys_platform}_{nfo.storage_id}"
return nfo
@staticmethod
def get_build_info(cmakecache_txt, buildtype):
nfo = CMakeCache(cmakecache_txt)
def _btflags(name):
return (getattr(nfo, name), getattr(nfo, f"{name}_{buildtype.upper()}"))
nfo.storage_id = myhash(
buildtype,
nfo.CMAKE_CXX_COMPILER_ID,
nfo.CMAKE_CXX_COMPILER_VERSION,
nfo.CMAKE_CXX_COMPILER_VERSION_INTERNAL,
nfo.CMAKE_CXX_COMPILER_ABI,
nfo.CMAKE_CXX_SIZEOF_DATA_PTR,
nfo.CMAKE_C_COMPILER_ID,
nfo.CMAKE_C_COMPILER_VERSION,
nfo.CMAKE_C_COMPILER_VERSION_INTERNAL,
nfo.CMAKE_C_COMPILER_ABI,
nfo.CMAKE_C_SIZEOF_DATA_PTR,
*_btflags("CMAKE_CXX_FLAGS"),
*_btflags("CMAKE_C_FLAGS"),
*_btflags("CMAKE_STATIC_LINKER_FLAGS"),
*_btflags("CMAKE_SHARED_LINKER_FLAGS"),
)
#
ccname = nfo.CMAKE_CXX_COMPILER_ID.lower()
if ccname == "gnu":
ccname = "gcc"
ccname += nfo.CMAKE_CXX_COMPILER_VERSION.lower()
#
if nfo.CMAKE_C_SIZEOF_DATA_PTR == "4":
bits = "32bit"
elif nfo.CMAKE_C_SIZEOF_DATA_PTR == "8":
bits = "64bit"
else:
raise Exception("unknown architecture")
#
nfo.storage_name = f"{bits}_{buildtype}_{ccname}_{nfo.storage_id}"
return nfo
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
class CMakeCache(Munch):
def __init__(self, cmakecache_txt):
import glob
for line in iter_cmake_lines(cmakecache_txt):
spl = line.split("=")
if len(spl) < 2:
continue
k, ty = spl[0].split(":")
v = "=".join(spl[1:]).strip()
setattr(self, k, v)
bdir = os.path.dirname(os.path.abspath(cmakecache_txt))
self._c_compiler_file = sorted(glob.glob(f"{bdir}/CMakeFiles/*/CMakeCCompiler.cmake"))[-1] # get the last
self._cxx_compiler_file = sorted(glob.glob(f"{bdir}/CMakeFiles/*/CMakeCXXCompiler.cmake"))[-1] # get the last
self._system_file = sorted(glob.glob(f"{bdir}/CMakeFiles/*/CMakeSystem.cmake"))[-1] # get the last
self._load_cmake_file(self._c_compiler_file)
self._load_cmake_file(self._cxx_compiler_file)
ccomfile = f"{bdir}/compile_commands.json"
self._compile_commands_file = ccomfile if os.path.exists(ccomfile) else None
def _load_cmake_file(self, filename):
for line in iter_cmake_lines(filename):
if not line.startswith("set("):
continue
k = re.sub(r"set\((.*)\ +(.*)\)", r"\1", line)
v = re.sub(r"set\((.*)\ +(.*)\)", r"\2", line)
v = v.strip('"').strip("'").strip()
setattr(self, k, v)
def save(self, results_dir):
copy_file_to_dir(self._c_compiler_file, results_dir)
copy_file_to_dir(self._cxx_compiler_file, results_dir)
copy_file_to_dir(self._system_file, results_dir)
if self._compile_commands_file is not None:
copy_file_to_dir(self._compile_commands_file, results_dir)
def iter_cmake_lines(filename):
with open(filename) as f:
for line in f.readlines():
line = line.strip()
if line.startswith("#") or line.startswith("//") or len(line) == 0:
continue
yield line
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
def add_results(args):
log("adding results:", args.results)
col = BenchmarkCollection(args.target)
col.add(args.results)
def add_meta(args):
log("adding bm run metadata to results dir:", args.results)
meta = ResultMeta(results_dir=args.results,
cmakecache=args.cmakecache,
build_type=args.build_type)
meta.save(args.results)
log("adding bm run metadata to results dir: success!")
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
if __name__ == '__main__':
main()


@@ -0,0 +1,10 @@
munch
pyyaml
py-cpuinfo
psutil
gitpython
flask
markupsafe
Frozen-Flask
requests
mmh3


@@ -0,0 +1,45 @@
<!DOCTYPE HTML>
<html>
<head>
<meta charset="UTF-8">
<link rel="shortcut icon" href="#">
<link rel="stylesheet" type="text/css" href="/static/jquery-ui.min.css"/>
<link rel="stylesheet" type="text/css" href="/static/jquery.dataTables.min.css"/>
<link rel="stylesheet" type="text/css" href="/static/highlight.github.min.css"/>
<style>
body {
font-family: "Trebuchet MS", sans-serif;
margin: 50px;
}
.chart {
height: 700px; max-width: 920px; margin: 0px auto;
}
</style>
</head>
<body>
<h1 id="heading-title">Title</h1>
<div>
Available benchmarks:
<ul id="toc"></ul>
</div>
<div id="bm-results"></div>
<div><pre id="dbg"></pre></div>
<!-- scripts -->
<script type="text/javascript" src="/static/jquery-3.3.1.min.js"></script>
<script type="text/javascript" src="/static/jquery-ui.js"></script>
<script type="text/javascript" src="/static/jquery.canvasjs.min.js"></script>
<script type="text/javascript" src="/static/Chart.min.js"></script>
<script type="text/javascript" src="/static/jquery.dataTables.min.js"></script>
<script type="text/javascript" src="/static/highlight.min.js"></script>
<script type="text/javascript" src="/bm.js"></script>
<script type="text/javascript">
$(document).ready(function() {
$.getJSON('/manifest.json', function(specs){
loadSpecs(specs);
})
});
</script>
</body>
</html>


@@ -0,0 +1,105 @@
if(NOT _c4CatSourcesIncluded)
set(_c4CatSourcesIncluded ON)
#------------------------------------------------------------------------------
# concatenate the source files to an output file, adding preprocessor adjustment
# for correct file/line reporting
function(c4_cat_sources files output umbrella_target)
_c4_cat_sources_create_cat(cat)
c4_to_full_path("${files}" full_files) # we must work with full paths
c4_separate_list("${full_files}" sepfiles) # and use a string instead of a list
c4_dbg("${_c4_prefix}: catting sources to ${output}")
if(NOT EXISTS "${output}")
# the cat command is executed at build time, but we need the output
# file to exist to be able to create the target. so to bootstrap, just
# run the command now
c4_dbg("${_c4_prefix}: creating ${output} for the first time")
execute_process(
COMMAND ${cat} "${sepfiles}" "${output}"
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
)
else()
c4_dbg("output exists: ${output}")
endif()
# add a custom command invoking our cat script for the input files
add_custom_command(OUTPUT ${output}
COMMAND ${cat} "${sepfiles}" "${output}"
DEPENDS ${files}
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
COMMENT "concatenating sources to ${output}")
if(NOT TARGET ${umbrella_target})
add_custom_target(${umbrella_target} DEPENDS ${output} ${files})
endif()
endfunction(c4_cat_sources)
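# Example invocation (hypothetical variable and target names):
#   c4_cat_sources("${MYLIB_SOURCES}" "${CMAKE_BINARY_DIR}/src/mylib.all.cpp" mylib-cat)
# This would (re)generate mylib.all.cpp at build time whenever any of the listed sources change.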
#------------------------------------------------------------------------------
# get a cat script
function(_c4_cat_sources_create_cat catfile)
# create a script to concatenate the sources
if(WIN32)
set(cat ${CMAKE_BINARY_DIR}/_c4catfiles.bat)
set(cattmp ${CMAKE_BINARY_DIR}/${CMAKE_FILES_DIRECTORY}/_c4catfiles.bat)
else()
set(cat ${CMAKE_BINARY_DIR}/_c4catfiles.sh)
set(cattmp ${CMAKE_BINARY_DIR}/${CMAKE_FILES_DIRECTORY}/_c4catfiles.sh)
endif()
set(${catfile} ${cat} PARENT_SCOPE)
if(NOT EXISTS ${cat})
if(WIN32)
file(WRITE ${cattmp} "
setlocal EnableDelayedExpansion
set \"src_files=%1\"
set \"out_file=%2\"
echo.>\"out_file%\"
for %%f in (%src_files%) do (
echo.>>\"%out_file%\"
echo.>>\"%out_file%\"
echo \"/*BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB*/\".>>\"%out_file%\"
echo \"/*BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB*/\".>>\"%out_file%\"
echo \"/*BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB*/\".>>\"%out_file%\"
echo \"#line 1 \\\"%%f\\\" // reset __LINE__ and __FILE__ to the correct value\".>>\"%out_file%\"
type %%f>>\"%out_file%\"
)
")
else()
file(WRITE ${cattmp} "#!/bin/sh
src_files=$1
out_file=$2
#echo \"src_files $src_files\"
#echo \"out_file $out_file\"
cat > $out_file << EOF
// DO NOT EDIT.
// this is an auto-generated file, and will be overwritten
EOF
for f in $src_files ; do
cat >> $out_file <<EOF
/*BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB*/
/*BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB*/
/*BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB*/
#line 1 \"$f\"
EOF
cat $f >> $out_file
done
echo \"Wrote output to $out_file\"
")
endif()
# add execute permissions
get_filename_component(catdir ${cat} DIRECTORY)
file(COPY ${cattmp} DESTINATION ${catdir}
FILE_PERMISSIONS
OWNER_READ OWNER_WRITE OWNER_EXECUTE
GROUP_READ GROUP_EXECUTE
WORLD_READ WORLD_EXECUTE
)
endif()
endfunction()
endif(NOT _c4CatSourcesIncluded)


@@ -0,0 +1,121 @@
# (C) 2019 Joao Paulo Magalhaes <dev@jpmag.me>
if(NOT _c4_doxygen_included)
set(_c4_doxygen_included ON)
#------------------------------------------------------------------------------
# TODO use customizations from https://cmake.org/cmake/help/v3.9/module/FindDoxygen.html
function(c4_setup_doxygen umbrella_option)
cmake_dependent_option(${_c4_uprefix}BUILD_DOCS "Enable targets to build documentation for ${_c4_prefix}" ON "${umbrella_option}" OFF)
if(${_c4_uprefix}BUILD_DOCS)
find_package(Doxygen QUIET)
if(DOXYGEN_FOUND)
c4_log("enabling documentation targets")
else()
c4_dbg("doxygen not found")
endif()
endif()
endfunction()
#------------------------------------------------------------------------------
function(c4_add_doxygen doc_name)
if(NOT ${_c4_uprefix}BUILD_DOCS)
return()
endif()
#
set(opt0
)
set(opt1
DOXYFILE DOXYFILE_IN
PROJ
PROJ_BRIEF
VERSION
OUTPUT_DIR
CLANG_DATABASE_PATH
)
set(optN
INPUT
FILE_PATTERNS
EXCLUDE
EXCLUDE_PATTERNS
EXCLUDE_SYMBOLS
STRIP_FROM_PATH
STRIP_FROM_INC_PATH
EXAMPLE_PATH
)
cmake_parse_arguments("" "${opt0}" "${opt1}" "${optN}" ${ARGN})
#
if(NOT _PROJ)
set(_PROJ ${_c4_ucprefix})
endif()
if(NOT _DOXYFILE AND NOT _DOXYFILE_IN)
set(_DOXYFILE_IN ${CMAKE_CURRENT_LIST_DIR}/Doxyfile.in)
endif()
if(NOT _OUTPUT_DIR)
if("${doc_name}" MATCHES "^[Dd]oc")
set(_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/${doc_name})
else()
set(_OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/doc/${doc_name})
endif()
endif()
#
_c4_doxy_fwd_to_cmd(_PROJ OFF)
_c4_doxy_fwd_to_cmd(_PROJ_BRIEF OFF)
_c4_doxy_fwd_to_cmd(_VERSION OFF)
_c4_doxy_fwd_to_cmd(_OUTPUT_DIR OFF)
_c4_doxy_fwd_to_cmd(_CLANG_DATABASE_PATH OFF)
_c4_doxy_fwd_to_cmd(_INPUT ON)
_c4_doxy_fwd_to_cmd(_FILE_PATTERNS ON)
_c4_doxy_fwd_to_cmd(_EXCLUDE ON)
_c4_doxy_fwd_to_cmd(_EXCLUDE_PATTERNS ON)
_c4_doxy_fwd_to_cmd(_EXCLUDE_SYMBOLS ON)
_c4_doxy_fwd_to_cmd(_STRIP_FROM_PATH ON)
_c4_doxy_fwd_to_cmd(_STRIP_FROM_INC_PATH ON)
_c4_doxy_fwd_to_cmd(_EXAMPLE_PATH ON)
#
if("${doc_name}" MATCHES "^[Dd]oc")
set(tgt ${_c4_lcprefix}-${doc_name})
else()
set(tgt ${_c4_lcprefix}-doc-${doc_name})
endif()
#
if(_DOXYFILE)
set(doxyfile_out ${_DOXYFILE})
elseif(_DOXYFILE_IN)
set(doxyfile_out ${_OUTPUT_DIR}/Doxyfile)
set(config_script ${_c4_project_dir}/c4DoxygenConfig.cmake)
add_custom_command(OUTPUT ${doxyfile_out}
COMMAND ${CMAKE_COMMAND} -E remove -f ${doxyfile_out}
COMMAND ${CMAKE_COMMAND} -DDOXYFILE_IN=${_DOXYFILE_IN} -DDOXYFILE_OUT=${doxyfile_out} ${defs} '-DALLVARS=${allvars}' '-DLISTVARS=${listvars}' -P ${config_script}
DEPENDS ${_DOXYFILE_IN} ${config_script}
COMMENT "${tgt}: generating ${doxyfile_out}"
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
endif()
#
add_custom_target(${tgt}
COMMAND ${DOXYGEN_EXECUTABLE} ${doxyfile_out}
DEPENDS ${doxyfile_out}
WORKING_DIRECTORY ${_OUTPUT_DIR}
COMMENT "${tgt}: docs will be placed in ${_OUTPUT_DIR}"
VERBATIM)
_c4_set_target_folder(${tgt} doc)
endfunction()
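# Example invocation (hypothetical arguments):
#   c4_add_doxygen(doc
#       PROJ myproj
#       INPUT ${CMAKE_CURRENT_LIST_DIR}/src
#       OUTPUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/doc)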
macro(_c4_doxy_fwd_to_cmd varname is_list)
if(NOT ("${${varname}}" STREQUAL ""))
if("${defs}" STREQUAL "")
set(li "-D${varname}=${${varname}}")
else()
set(li ${defs})
list(APPEND li "-D${varname}='${${varname}}'")
endif()
set(defs ${li})
endif()
set(allvars "${allvars};${varname}")
if(${is_list})
set(listvars "${listvars};${varname}")
endif()
endmacro()
endif(NOT _c4_doxygen_included)
