Sperg

AuthorAlphazone
Submission date2018-07-17 03:57:38.049930
Rating5172
Matches played291
Win rate49.83

Use rpsrunner.py to play unranked matches on your computer.

Source code:

HARDCODED = True
K = 8
nvm = 1
lbegin = 0
lend = 24
rbegin = lend
rend = rbegin + lend - lbegin
mbegin = rend
mend = mbegin + 2
naddr = 2 ** (K + 1) - 1
nops = mend
nterm = 11 #keep not a factor of nops
    
global match_score
if input == "":
    import random
    import uuid
    import pickle
    
    last_output = "S"
    match_score = 0
    
    if not 'vms' in locals() or len(locals()['vms']) < nvm:
        if HARDCODED:
            vms = [
            b'(dp0\nVmatch_score\np1\nL22L\nsVfitness\np2\nL48263799L\nsVid\np3\nccopy_reg\n_reconstructor\np4\n(cuuid\nUUID\np5\nc__builtin__\nobject\np6\nNtp7\nRp8\n(dp9\nVint\np10\nL192011604822237475992320820387428139038L\nsbsVd\np11\n(lp12\n(dp13\nVhistory\np14\n(lp15\nVS\np16\nasa(dp17\na(dp18\na(dp19\na(dp20\na(dp21\na(dp22\na(dp23\na(dp24\na(dp25\na(dp26\na(dp27\na(dp28\na(dp29\na(dp30\na(dp31\na(dp32\na(dp33\na(dp34\na(dp35\na(dp36\na(dp37\na(dp38\na(dp39\na(dp40\na(dp41\na(dp42\na(dp43\na(dp44\na(dp45\na(dp46\na(dp47\na(dp48\na(dp49\na(dp50\na(dp51\na(dp52\na(dp53\na(dp54\na(dp55\na(dp56\na(dp57\na(dp58\na(dp59\na(dp60\na(dp61\na(dp62\na(dp63\na(dp64\na(dp65\na(dp66\na(dp67\na(dp68\na(dp69\na(dp70\na(dp71\na(dp72\na(dp73\na(dp74\na(dp75\na(dp76\na(dp77\na(dp78\na(dp79\na(dp80\na(dp81\na(dp82\na(dp83\na(dp84\na(dp85\na(dp86\na(dp87\na(dp88\na(dp89\na(dp90\na(dp91\na(dp92\na(dp93\na(dp94\na(dp95\na(dp96\na(dp97\na(dp98\na(dp99\na(dp100\na(dp101\na(dp102\na(dp103\na(dp104\na(dp105\na(dp106\na(dp107\na(dp108\na(dp109\na(dp110\na(dp111\na(dp112\na(dp113\na(dp114\na(dp115\na(dp116\na(dp117\na(dp118\na(dp119\na(dp120\na(dp121\na(dp122\na(dp123\na(dp124\na(dp125\na(dp126\na(dp127\na(dp128\na(dp129\na(dp130\na(dp131\na(dp132\na(dp133\na(dp134\na(dp135\na(dp136\na(dp137\na(dp138\na(dp139\na(dp140\na(dp141\na(dp142\na(dp143\na(dp144\na(dp145\na(dp146\na(dp147\na(dp148\na(dp149\na(dp150\na(dp151\na(dp152\na(dp153\na(dp154\na(dp155\na(dp156\na(dp157\na(dp158\na(dp159\na(dp160\na(dp161\na(dp162\na(dp163\na(dp164\na(dp165\na(dp166\na(dp167\na(dp168\na(dp169\na(dp170\na(dp171\na(dp172\na(dp173\na(dp174\na(dp175\na(dp176\na(dp177\na(dp178\na(dp179\na(dp180\na(dp181\na(dp182\na(dp183\na(dp184\na(dp185\na(dp186\na(dp187\na(dp188\na(dp189\na(dp190\na(dp191\na(dp192\na(dp193\na(dp194\na(dp195\na(dp196\na(dp197\na(dp198\na(dp199\na(dp200\na(dp201\na(dp202\na(dp203\na(dp204\na(dp205\na(dp206\na(dp207\na(dp208\na(dp209\na(dp210\na(dp211\na(dp212\na(dp213\na(dp214\na(dp215\na(dp216\na(dp217\na(dp218\na(dp219\na(dp220\na(dp221\na(dp222\na(dp223\na(dp224\na(dp225\na(dp226\na(dp227\na(dp228\na(dp229\na(dp230\na(dp231\na(dp232\na(dp233\na(dp234\na(dp235\na(dp236\na(dp237\na(dp238\na(dp239\na(dp240\na(dp241\na(dp242\na(dp243\na(dp244\na(dp245\na(dp246\na(dp247\na(dp248\na(dp249\na(dp250\na(dp251\na(dp252\na(dp253\na(dp254\na(dp255\na(dp256\na(dp257\na(dp258\na(dp259\na(dp260\na(dp261\na(dp262\na(dp263\na(dp264\na(dp265\na(dp266\na(dp267\na(dp268\na(dp269\na(dp270\na(dp271\na(dp272\nVP\np273\nF0.0\nsg16\nF0.0\nsVR\np274\nF0.9999999999999999\nsa(dp275\na(dp276\na(dp277\na(dp278\na(dp279\na(dp280\na(dp281\na(dp282\na(dp283\na(dp284\na(dp285\na(dp286\na(dp287\na(dp288\na(dp289\na(dp290\na(dp291\na(dp292\na(dp293\na(dp294\na(dp295\na(dp296\na(dp297\na(dp298\na(dp299\na(dp300\na(dp301\na(dp302\na(dp303\na(dp304\na(dp305\na(dp306\na(dp307\na(dp308\na(dp309\na(dp310\na(dp311\na(dp312\na(dp313\na(dp314\na(dp315\na(dp316\na(dp317\na(dp318\na(dp319\na(dp320\na(dp321\na(dp322\na(dp323\na(dp324\na(dp325\na(dp326\na(dp327\na(dp328\na(dp329\na(dp330\na(dp331\na(dp332\na(dp333\na(dp334\na(dp335\na(dp336\na(dp337\na(dp338\na(dp339\ng14\n(lp340\ng273\nasa(dp341\na(dp342\ng14\n(lp343\ng274\nasa(dp344\na(dp345\na(dp346\na(dp347\na(dp348\na(dp349\na(dp350\na(dp351\na(dp352\ng273\nF0.3445646120768967\nsg16\nF0.4236165723050094\nsg274\nF0.23181881561809392\nsa(dp353\na(dp354\na(dp355\na(dp356\na(dp357\na(dp358\na(dp359\na(dp360\na(dp361\na(dp362\na(dp363\na(dp364\na(dp365\na(dp366\na(dp367\na(dp368\na(dp369\na(dp370\na(dp371\na(dp372\na(dp373\na(dp374\na(dp375\na(dp376\na(dp377\na(dp378\na(dp379\na(dp380\na(dp381\na(dp382\na(dp383\na(dp384\na(dp385\na(dp386\na(dp387\na(dp388\na(dp389\na(dp390\na(dp391\na(dp392\na(dp393\na(dp394\na(dp395\na(dp396\na(dp397\na(dp398\na(dp399\na(dp400\na(dp401\na(dp402\na(dp403\na(dp404\na(dp405\na(dp406\na(dp407\na(dp408\na(dp409\na(dp410\na(dp411\na(dp412\na(dp413\na(dp414\na(dp415\na(dp416\na(dp417\na(dp418\na(dp419\na(dp420\na(dp421\na(dp422\na(dp423\na(dp424\na(dp425\na(dp426\na(dp427\na(dp428\na(dp429\na(dp430\na(dp431\na(dp432\na(dp433\na(dp434\na(dp435\na(dp436\na(dp437\na(dp438\na(dp439\na(dp440\na(dp441\na(dp442\na(dp443\na(dp444\na(dp445\na(dp446\na(dp447\na(dp448\na(dp449\na(dp450\na(dp451\na(dp452\na(dp453\na(dp454\na(dp455\na(dp456\na(dp457\na(dp458\na(dp459\na(dp460\na(dp461\na(dp462\na(dp463\na(dp464\na(dp465\na(dp466\na(dp467\na(dp468\na(dp469\na(dp470\na(dp471\na(dp472\na(dp473\na(dp474\na(dp475\na(dp476\na(dp477\na(dp478\na(dp479\na(dp480\na(dp481\na(dp482\na(dp483\na(dp484\na(dp485\na(dp486\na(dp487\na(dp488\na(dp489\na(dp490\na(dp491\na(dp492\na(dp493\na(dp494\na(dp495\na(dp496\na(dp497\na(dp498\na(dp499\na(dp500\na(dp501\na(dp502\na(dp503\na(dp504\na(dp505\na(dp506\na(dp507\na(dp508\na(dp509\na(dp510\na(dp511\na(dp512\na(dp513\na(dp514\na(dp515\na(dp516\na(dp517\na(dp518\na(dp519\na(dp520\na(dp521\na(dp522\na(dp523\na(dp524\na(dp525\na(dp526\na(dp527\na(dp528\na(dp529\na(dp530\nasVscore\np531\nL22L\nsVmove\np532\ng273\nsVpc\np533\nL0L\nsVrounds\np534\nL764235L\nsVdp\np535\nL0L\nsVmatch_rounds\np536\nL1000L\nsVp\np537\n(lp538\nL3351889L\naL2437167L\naL984628L\naL941548L\naL4838874L\naL7314254L\naL7682447L\naL2718485L\naL2214418L\naL2870025L\naL5082098L\naL2912046L\naL3653377L\naL7740884L\naL1262051L\naL2089916L\naL2434717L\naL5625726L\naL2723120L\naL4370640L\naL6326443L\naL5525330L\naL3141868L\naL5566129L\naL1835575L\naL4156400L\naL436873L\naL6940382L\naL6438984L\naL2719845L\naL4054884L\naL1560824L\naL5197522L\naL6329780L\naL6694481L\naL8015834L\naL6062831L\naL5318539L\naL7319492L\naL5393526L\naL3649717L\naL4421939L\naL6466765L\naL7529837L\naL6559618L\naL7692312L\naL7236008L\naL5209543L\naL7342712L\naL5698254L\naL7421736L\naL50537L\naL3775095L\naL4701967L\naL2403535L\naL5515630L\naL1258933L\naL6647982L\naL3025534L\naL7904730L\naL2078339L\naL2645882L\naL5135954L\naL5469159L\naL6211877L\naL3160313L\naL6378779L\naL6832418L\naL2358007L\naL964071L\naL2314381L\naL6241409L\naL3328856L\naL2848815L\naL3701028L\naL4217067L\naL1319795L\naL2415377L\naL3169047L\naL2008333L\naL8125770L\naL5712539L\naL5568076L\naL911626L\naL7849459L\naL7730231L\naL3957399L\naL4892676L\naL7235860L\naL5035823L\naL2874573L\naL6734254L\naL8155023L\naL771762L\naL4139652L\naL3372900L\naL2542818L\naL727740L\naL4465557L\naL439986L\naL2680274L\naL1944093L\naL5124885L\naL3262497L\naL6830387L\naL4303097L\naL129319L\naL5375893L\naL7579504L\naL4348381L\naL6574517L\naL7711224L\naL1567845L\naL7110963L\naL6000324L\naL4345207L\naL55820L\naL6073815L\naL6698559L\naL1263188L\naL2643676L\naL3136171L\naL3757768L\naL2488572L\naL7111725L\naL1821397L\naL3427618L\naL28442L\naL3090268L\naL4918175L\naL771883L\naL4333109L\naL6198608L\naL1562652L\naL2338443L\naL103238L\naL100483L\naL1190762L\naL1809670L\naL3284230L\naL6319127L\naL3545333L\naL6083920L\naL5451666L\naL7177507L\naL7170605L\naL1153426L\naL1312238L\naL7396532L\naL4030137L\naL7022411L\naL942854L\naL5778094L\naL5359426L\naL3187348L\naL5595546L\naL4451441L\naL3725568L\naL4624395L\naL6090657L\naL7492709L\naL3598616L\naL4778903L\naL6507887L\naL4652337L\naL502919L\naL3225151L\naL8046914L\naL980976L\naL7426609L\naL2348027L\naL3009474L\naL4032143L\naL3313615L\naL5279444L\naL4163094L\naL3869375L\naL7156758L\naL7456637L\naL3497555L\naL6554003L\naL7497791L\naL2894194L\naL8309577L\naL5107453L\naL1252920L\naL1843801L\naL4378006L\naL5220686L\naL3361540L\naL2897296L\naL4746065L\naL4885504L\naL5346359L\naL7231988L\naL3588071L\naL1537571L\naL2204478L\naL8103557L\naL12847L\naL7727627L\naL2766857L\naL2792198L\naL7838353L\naL135229L\naL6568782L\naL3767103L\naL5137765L\naL1301688L\naL5252916L\naL7897015L\naL3766096L\naL2731890L\naL3185916L\naL3634624L\naL2502515L\naL8360754L\naL2564986L\naL6175303L\naL5589953L\naL7018194L\naL65635L\naL7660295L\naL640159L\naL6561508L\naL7835589L\naL6222283L\naL7839055L\naL7191151L\naL6296627L\naL4482757L\naL2924608L\naL7679250L\naL791193L\naL6230772L\naL5541476L\naL6633526L\naL1001116L\naL4791719L\naL40934L\naL3102031L\naL7845474L\naL6886640L\naL2698866L\naL187816L\naL5967751L\naL723893L\naL7203898L\naL5702268L\naL6213525L\naL531346L\naL7224548L\naL5494506L\naL4895273L\naL6580249L\naL7423292L\naL2682023L\naL4044326L\naL1398665L\naL2863750L\naL417260L\naL7813709L\naL3907785L\naL4174799L\naL2340740L\naL3459782L\naL8266142L\naL2543026L\naL4046969L\naL60365L\naL4593326L\naL1295130L\naL8153003L\naL3240404L\naL1126935L\naL693249L\naL536355L\naL6185191L\naL3540675L\naL729387L\naL3408347L\naL1722298L\naL5673581L\naL7420196L\naL6771453L\naL8369788L\naL3227190L\naL7280202L\naL3719668L\naL4285512L\naL5820522L\naL8298782L\naL5515905L\naL4426122L\naL6097255L\naL5420312L\naL7394495L\naL7473495L\naL149855L\naL5516162L\naL5601705L\naL711625L\naL2713749L\naL7410446L\naL2606451L\naL6779052L\naL7935524L\naL8080257L\naL4560204L\naL7637619L\naL7495282L\naL3518397L\naL6598704L\naL3289454L\naL6236783L\naL6122171L\naL3323072L\naL2519057L\naL8154151L\naL356240L\naL2545062L\naL216510L\naL2815603L\naL1731381L\naL2085842L\naL4103561L\naL5245579L\naL3739250L\naL4161072L\naL1348003L\naL285248L\naL3740603L\naL6264319L\naL7686206L\naL7609349L\naL4873932L\naL5323470L\naL1620326L\naL225794L\naL5885750L\naL6932721L\naL5803422L\naL7416334L\naL8292364L\naL1690372L\naL1804033L\naL5334049L\naL1449593L\naL2802923L\naL8280085L\naL1497210L\naL3844209L\naL4864224L\naL6073002L\naL1756824L\naL7917837L\naL7946133L\naL617865L\naL3612130L\naL732716L\naL4993917L\naL5668134L\naL4071407L\naL3573035L\naL4674042L\naL5608180L\naL7988219L\naL5503650L\naL1323035L\naL6565680L\naL2804535L\naL3669130L\naL1053564L\naL4761043L\naL5920864L\naL7736053L\naL7557076L\naL6270320L\naL2993643L\naL1291879L\naL5195538L\naL2290638L\naL8004252L\naL4850254L\naL5060079L\naL6913807L\naL2935326L\naL7746353L\naL4601389L\naL152416L\naL4041596L\naL7319705L\naL7775819L\naL938804L\naL2062287L\naL1757703L\naL8275849L\naL8091641L\naL3793935L\naL7613907L\naL5090953L\naL7470906L\naL601761L\naL5949877L\naL1794666L\naL4880624L\naL5152257L\naL6889407L\naL7896605L\naL3647944L\naL7532272L\naL3664888L\naL3023739L\naL1677740L\naL1470770L\naL8179066L\naL8069120L\naL5309609L\naL6316493L\naL1412502L\naL5302774L\naL8341304L\naL4440189L\naL7261684L\naL8176141L\naL2327678L\naL5507378L\naL8363465L\naL3195142L\naL2293739L\naL7255137L\naL1001487L\naL4586606L\naL986516L\naL3625948L\naL888367L\naL4302741L\naL7896421L\naL7224393L\naL3229974L\naL4387570L\naL5759412L\naL2182272L\naL7111811L\naL2025501L\naL2136098L\naL140178L\naL2418635L\naL2330574L\naL3788363L\naL41015L\naL3192294L\naL676840L\naL7862963L\naL543561L\naL8069429L\naL1835074L\naL268224L\naL7225331L\naL216884L\naL4857528L\naL3105491L\naL4912712L\naL2759507L\naL1206615L\naL4338L\naL8152616L\naL6648678L\naL409414L\naL8083907L\naL4522165L\naL4828506L\naL7640256L\naL6438580L\naL2853908L\naL4563252L\naL6210262L\naL4990253L\naL5388636L\naL2169959L\naL3389388L\naL5792292L\naL7193645L\naL4178974L\naL3383674L\naL7278696L\naL6968374L\naL1104043L\naL6550242L\naL2215858L\naL6712762L\naL6069455L\naL7504865L\naL3065935L\naL4704538L\naL423000L\naL5250721L\naL6907330L\naL6771811L\naL2661886L\naL7417978L\naL6418101L\naL830065L\naL1124509L\naL3995173L\naL398187L\naL301247L\naL5724370L\naL6428894L\naL5899993L\naL2256012L\nas.'
            ]
            vms = list(map(lambda code: pickle.loads(code), vms))
        else:
            def make(pspace):
                return {
                        "p": pspace,
                        "pc": 0,
                        "dp": 0,
                        "score": 0,
                        "fitness": 0,
                        "rounds": 0,
                        "move": "S",
                        "id": uuid.uuid4(),
                }
                
            if not 'vms' in locals():
                vms = []
            else:
                #fixup in case supplied vms are just pspace in tuple
                for i in range(len(vms)):
                    if not isinstance(vms, dict):
                        vms[i] = make(vms[i])
                        
            while len(vms) < nvm:
                vms.append(make([random.getrandbits(23) for _ in range(naddr)]))
    
    for vm in vms:
        vm["score"] = 0
        vm["match_rounds"] = 0
        vm["d"] = [{} for _ in range(naddr)]
        
    def randselect(P):
        r = random.random()
        for i in range(len(P)):
            p = P[i]
            if r < p:
                return i
            r -= p
        return len(P) - 1
    
    def weightedselect(weights):
        S = sum(weights)
        P = [x / S for x in weights]
        return randselect(P)
    
    permutations = [
        { "R": "R", "P": "P", "S": "S" },
        { "R": "R", "P": "S", "S": "P" },
        { "R": "S", "P": "R", "S": "P" },
        { "R": "S", "P": "P", "S": "R" },
        { "R": "P", "P": "S", "S": "R" },
        { "R": "P", "P": "R", "S": "S" },
    ]
    
    rps = "RPS"
    derps = { "R": 0, "P": 1, "S": 2 }
    
    def fromhistory(window, data, f, before_full=None, obs=None):
        if not "history" in data:
            H = []
            data["history"] = H
            move = random.choice("RPS")
        else:
            H = data["history"]
            if len(H) < window:
                if before_full is None:
                    return random.choice("RPS")
                else:
                   return before_full(H)
            else:
                move = f(H)
                del H[0]
        
        if obs is None:
            H.append(move)
        else:
           H.append(obs)
        return move
    
    def randbias(window, data):
        def f(H):
            E = 2 * window / 3
            weights = [max(0, E - H.count(s)) for s in "RPS"]
            return rps[weightedselect(weights)]
        return fromhistory(window, data, f)
    
    def max_of_rps(scores):
        m = max(scores)
        weights = [1 if c == m else 0 for c in scores]
        return rps[weightedselect(weights)]
    
    def naive_predict(H):
        counts = [H.count(s) for s in "RPS"]
        return max_of_rps(counts)
    
    def naive_window(window, data, x):
        return fromhistory(window, data, naive_predict, naive_predict, obs=x)
    
    def dropswitch_window(window, p, data, x):
        def predict(H):
            if "pred" in data and data["pred"] != x and random.random() < p:
                del H[:]
                del data["pred"]
                return random.choice("RPS")
            
            pred = naive_predict(H)
            data["pred"] = pred
            return pred
        return fromhistory(window, data, predict, obs=x)
    
    def decay_scoring(gamma, data, x):
        if "R" in data:
            for s in "RPS":
                data[s] += gamma * ((1 if s == x else 0) - data[s])
        else:
            for s in "RPS":
                data[s] = 0
                
        return max_of_rps([data["R"], data["P"], data["S"]])
    
    def payoff(mymove, opponents):
        if mymove == "R":
            if opponents == "R":
                return 0
            elif opponents == "P":
                return -1
            else: #S
                return 1
        elif mymove == "P":
            if opponents == "R":
                return 1
            elif opponents == "P":
                return 0
            else: #S
                return -1
        elif mymove == "S":
            if opponents == "R":
                return -1
            elif opponents == "P":
                return 1
            else: #S
                return 0
    
    def terminal(op, data):
        if op == 1:
            if input != "":
                return input
        elif op == 2:
            return last_output
        elif op >= 3 and op < 8:
            return randbias(2 ** (op - 3 + 2), data)
        return random.choice("RPS")
    
    def select(op, L, R, data):
        def side(op, S):
            if op >= 1 and op < 6:
                return permutations[op][S]
            elif op >=  6 and op < 12:
                return naive_window(2 ** (op - 6 + 1), data, S)
            elif op >= 12 and op < 18:
                return dropswitch_window(2 ** (op - 12 + 1), 0.5, data, S)
            elif op >= 18 and op < 24:
                return decay_scoring(1 - 0.9 ** (op - 18 + 1), data, S)
        
        #L only
        if op >= lbegin + 1 and op < lend:
            return side(op, L)
            
        #R only
        elif op == rbegin:
            return R
        if op >= rbegin + 1 and op < rend:
            return side(op - rbegin, R)
        
        #mixed
        elif op == mbegin:
            return random.choice([L, R])
        elif op == mbegin + 1:
            return rps[(derps[L] + derps[R]) % 3]
        
        return L
    
    def size(k):
        return 2 ** (k + 1) - 1
    
    def step(vm, pc=0, k=K):
        pspace = vm["p"]
        op = pspace[pc]
        if k == 0:
            op %= nterm
            
            retval = terminal(op, vm["d"][pc])
            assert str(retval) in rps, str(op) + " term bad result"
            return retval
        else:
            op %= nops
            
            L = "S"
            R = "S"
            
            j = k - 1
            if not (op >= rbegin and op < rend): #if not in R only range
                L = step(vm, pc + 1, j)
            if not (op >= lbegin and op < lend): #if not in L only range
                R = step(vm, pc + 1 + size(j), j)
            
            retval = select(op, L, R, vm["d"][pc])
            assert str(retval) in rps, str(op) + " select bad result"
            return retval

    def run():
        global match_score
        if input != "":
            match_score += payoff(last_output, input)
        
        for vm in vms:
            if input != "":
                vm_payoff = payoff(vm["move"], input)
                vm["score"] += vm_payoff
                vm["fitness"] += vm_payoff
                vm["match_score"] = match_score
                vm["rounds"] += 1

            vm["move"] = step(vm)
        best = max(vms, key = lambda x: x["score"])
        best["match_rounds"] += 1
        return best["move"]

output = run()
last_output = output