Description
SpinReversalTransformComposite appears not to work properly when I send in a disconnected graph
To Reproduce
from dwave.system import DWaveSampler, FixedEmbeddingComposite
from dwave.preprocessing.composites import SpinReversalTransformComposite
import numpy as np
num_reads = 20
jay = {(0, 1): 0, (0, 2): -1, (1, 2): 1}
print('h=0','J=', jay)
print('The correlation matrix should be:\n'
'[[1, -1+eps, 1-eps]\n [-1+eps, 1, -1+eps]\n [1-eps, -1+eps, 1]]\n'
'for small eps.\n'
'This does not happen when using SpinReversalComposite')
print('Succeeds with 1 embedding of a length 2 chain, succeeds with 2 parallel embeddings of a length 2 chain, fails when introducing the SpinReversalTransformComposite (latter case)')
num_var = len(embeddings[0])
number_of_embeddings_to_use = 2 # len(embeddings)
for k in range(number_of_embeddings_to_use):
for j in range(num_var): # up to 0..1037
embedding_to_use[num_var*k+j] = [embeddings[k][j]]
big_h_new[num_var*k+j] = 0
for k, v in jay.items():
big_j_new[k] = v
for j in range(1, number_of_embeddings_to_use):
big_j_new[(k[0]+num_var*j, k[1]+num_var*j)] = v
sampler_kwargs = dict(h=big_h_new,
J=big_j_new,
num_reads=num_reads,
answer_mode='raw',
fast_anneal=True,
annealing_time=5 / 1000)
ss = base_sampler.sample_ising(**sampler_kwargs)
def ss_to_samps(ss):
all_samples = ss.record.sample[:, range(num_var)]
for k in range(1, number_of_embeddings_to_use):
all_samples = np.vstack((all_samples, ss.record.sample[:, range(num_var*k, num_var*(k+1))]))
samps = np.array(all_samples, dtype=float) # casting may not be necessary.
return samps
samps = ss_to_samps(ss)
correlation_matrix = np.einsum('si,sj->ij', samps, samps) / num_reads / number_of_embeddings_to_use # this is a full matrix with ones on the diagonal
print(correlation_matrix)
Next, this version does not work. If you look at the columns of the ss.record.sample returned, they don't maintain the correct patterns and the off-diagonal parts of the correlation matrix go to zero.
It looks like something gets messed up with the column / variable labels maybe? Unless I'm just doing something totally stupid? (possible?)
it appears as though using the SpinReversalTransformComposite doesn't work properly here
composed_sampler = SpinReversalTransformComposite(base_sampler)
num_spin_reversal_transforms = 5 # Also fails for 1
num_reads = int(num_reads/num_spin_reversal_transforms)
sampler_kwargs.update({'num_reads': num_reads,
'num_spin_reversal_transforms': num_spin_reversal_transforms})
ss = composed_sampler.sample_ising(**sampler_kwargs)
samps = ss_to_samps(ss)
correlation_matrix = np.einsum('si,sj->ij', samps, samps) / num_reads / num_spin_reversal_transforms / number_of_embeddings_to_use # this is a full matrix with ones on the diagonal
print(correlation_matrix)
print('h', big_h_new)
print('J', big_j_new)
print('embedding', embedding_to_use)
Expected behavior
print('The correlation matrix should be:\n'
'[[1, -1+eps, 1-eps]\n [-1+eps, 1, -1+eps]\n [1-eps, -1+eps, 1]]\n'
'for small eps.\n'
'This does not happen when using SpinReversalComposite')
Environment:
OS: Windows
Python version: 3.9.19
Additional context
Add any other context about the problem here.
Description SpinReversalTransformComposite appears not to work properly when I send in a disconnected graph
To Reproduce from dwave.system import DWaveSampler, FixedEmbeddingComposite from dwave.preprocessing.composites import SpinReversalTransformComposite import numpy as np
num_reads = 20 jay = {(0, 1): 0, (0, 2): -1, (1, 2): 1} print('h=0','J=', jay) print('The correlation matrix should be:\n' '[[1, -1+eps, 1-eps]\n [-1+eps, 1, -1+eps]\n [1-eps, -1+eps, 1]]\n' 'for small eps.\n' 'This does not happen when using SpinReversalComposite') print('Succeeds with 1 embedding of a length 2 chain, succeeds with 2 parallel embeddings of a length 2 chain, fails when introducing the SpinReversalTransformComposite (latter case)')
Advantage4.1 (default solver) valid embeddings:
embeddings = [[301, 306, 307], [591, 596, 597], [1071, 1077, 340], [807, 813, 373], [104, 949, 954], [247, 854, 860], [125, 131, 1177], [601, 607, 803], [565, 571, 809], [1093, 1098, 136], [1025, 1030, 1031], [808, 814, 469], [79, 816, 822], [1163, 617, 623], [831, 837, 367], [574, 1109, 1115], [1227, 1232, 1233], [516, 665, 670], [144, 661, 667], [1190, 1196, 245], [686, 692, 252], [3, 9, 960], [1059, 1065, 364], [939, 945, 380], [711, 717, 348], [1049, 1055, 544], [49, 54, 744], [1156, 1162, 449], [1117, 1122, 1123], [346, 1131, 1137], [820, 826, 475], [1008, 1009, 1014], [207, 213, 991], [650, 656, 276], [370, 1143, 1149], [86, 92, 900], [257, 1130, 1136], [700, 706, 456], [1107, 1113, 358], [563, 1217, 1223], [1106, 1111, 1112], [1037, 1043, 568], [400, 405, 406], [443, 1240, 1246], [1160, 329, 335], [603, 609, 995], [291, 297, 992], [67, 828, 834], [137, 143, 1189], [832, 838, 463], [527, 1205, 1210], [844, 850, 451], [284, 938, 944], [687, 693, 360], [184, 189, 190], [629, 634, 635], [68, 912, 918], [754, 481, 486], [14, 20, 864], [0, 1, 6], [729, 385, 390], [698, 704, 264], [40, 46, 1080], [905, 911, 542], [951, 957, 344], [1195, 221, 227], [962, 968, 255], [206, 212, 895], [256, 262, 1058], [721, 726, 727], [272, 950, 956], [471, 477, 1000], [533, 1145, 1151], [550, 1121, 1127], [558, 725, 731], [1068, 1074, 88], [1191, 1197, 365], [281, 1178, 287], [548, 953, 959], [758, 764, 265], [170, 176, 865], [84, 90, 708], [163, 853, 859], [167, 1213, 1219], [65, 71, 1188], [263, 1238, 1244], [1154, 209, 215], [576, 582, 707], [735, 741, 337], [230, 236, 871], [1142, 250, 1148], [641, 646, 504], [1027, 153, 1021], [679, 228, 234], [1028, 316, 321], [134, 140, 870], [145, 150, 733], [917, 922, 923], [123, 129, 985], [555, 561, 989], [1203, 1208, 1209], [25, 30, 720], [107, 1225, 1230], [1214, 1220, 251], [85, 732, 738], [1064, 328, 334], [181, 186, 757], [1029, 381, 1023], [899, 530, 536], [350, 867, 356], [999, 1005, 375], [292, 298, 1088], [267, 273, 974], [963, 
969, 351], [389, 394, 395], [1083, 1089, 352], [514, 1102, 508], [747, 753, 349], [108, 114, 685], [1165, 1171, 173], [1056, 1062, 76], [1026, 57, 1020], [1201, 1206, 1207], [53, 1164, 1170], [156, 157, 162], [903, 909, 338], [543, 549, 977], [1167, 1173, 341], [175, 841, 847], [554, 560, 869], [1228, 1234, 467], [15, 21, 972], [579, 585, 1007], [1169, 1175, 545], [1118, 1119, 1124], [638, 643, 644], [128, 925, 930], [515, 1174, 509], [304, 1076, 310], [182, 188, 901], [898, 434, 440], [74, 80, 882], [929, 934, 935], [742, 505, 510], [1157, 485, 491], [454, 1108, 1114], [303, 308, 309], [843, 849, 355], [672, 673, 678], [27, 32, 33], [551, 1229, 1235], [688, 694, 444], [602, 608, 887], [529, 534, 737], [1078, 484, 490], [1036, 1042, 460], [564, 570, 713], [195, 201, 1003], [1079, 604, 610], [539, 1241, 1247], [111, 117, 973], [734, 739, 740], [746, 752, 253], [231, 237, 967], [760, 766, 433], [625, 630, 631], [1057, 160, 166], [132, 138, 709], [703, 204, 210], [1016, 1010, 1015], [699, 705, 372], [897, 374, 891], [362, 879, 368], [627, 632, 633], [102, 745, 750], [5, 11, 1152], [1066, 496, 502], [1129, 1135, 178], [649, 655, 180], [600, 606, 695], [1099, 196, 202], [952, 958, 452], [192, 193, 198], [280, 286, 1082], [1184, 305, 311], [345, 1011, 1017], [271, 830, 836], [541, 546, 761], [856, 862, 439], [691, 216, 222], [518, 524, 874], [538, 1133, 1139], [827, 590, 595], [89, 95, 1176], [982, 483, 489], [12, 13, 18], [1081, 1086, 124], [613, 619, 815], [268, 274, 1070], [64, 69, 70], [424, 429, 430], [676, 682, 432], [776, 325, 331], [58, 1128, 1134], [566, 572, 881], [194, 200, 907], [664, 669, 396], [422, 428, 873], [680, 312, 318], [317, 322, 323], [1063, 232, 238], [927, 933, 339], [382, 1095, 1101], [1033, 1039, 165], [72, 78, 696], [279, 285, 986], [1153, 1159, 185], [97, 103, 769], [520, 526, 1090], [447, 964, 453], [1047, 1053, 357], [588, 589, 594], [1172, 293, 299], [28, 34, 1092], [458, 464, 868], [581, 586, 587], [519, 525, 970], [289, 294, 295], [436, 
1012, 1018], [926, 931, 932], [716, 324, 330], [168, 169, 174], [512, 886, 506], [473, 478, 479], [517, 522, 523], [109, 115, 781], [99, 105, 961], [187, 829, 835], [260, 878, 884], [1067, 616, 622], [788, 313, 319], [1091, 592, 598], [387, 392, 393], [106, 1141, 1146], [61, 66, 756], [146, 152, 889], [975, 981, 363], [155, 1237, 1243], [261, 998, 1004], [269, 1166, 275], [217, 223, 787], [796, 802, 457], [120, 126, 697], [1179, 1185, 353], [482, 487, 488], [158, 164, 877], [1186, 497, 503], [435, 441, 988], [60, 648, 654], [640, 645, 384], [614, 875, 620], [801, 421, 427], [1216, 1222, 455], [553, 559, 797], [806, 811, 812], [1155, 1161, 377], [920, 315, 320], [712, 718, 468], [819, 825, 379], [1193, 1199, 569], [896, 326, 332], [855, 343, 861], [759, 765, 361], [37, 42, 43], [1044, 1045, 1050], [1226, 203, 1231], [50, 56, 894], [652, 657, 658], [777, 409, 415], [941, 946, 947], [556, 1097, 1103], [1094, 1100, 244], [412, 417, 418], [466, 1120, 1126], [552, 653, 659], [83, 1212, 1218], [722, 728, 241], [218, 224, 883], [730, 493, 498], [442, 1132, 1138], [459, 976, 465], [87, 93, 996], [121, 127, 793], [1048, 1054, 448], [133, 139, 805], [818, 824, 254], [1181, 1187, 557], [1046, 1051, 1052], [1215, 1221, 383], [219, 225, 979], [532, 1013, 1019], [531, 965, 971], [1035, 1040, 1041], [135, 141, 997], [902, 908, 278], [1183, 233, 239], [171, 172, 177], [994, 495, 501], [243, 248, 249], [983, 615, 621], [91, 840, 846], [1105, 1110, 142], [612, 618, 683], [470, 476, 880], [77, 1104, 82], [662, 668, 240], [116, 937, 942], [1087, 208, 214], [75, 81, 984], [39, 44, 45], [1069, 148, 154], [327, 333, 980], [151, 817, 823], [2, 8, 876], [229, 235, 775], [48, 660, 666], [675, 681, 336], [113, 118, 119], [1192, 1198, 461], [96, 637, 642], [784, 790, 445], [567, 573, 1001], [55, 852, 858], [1075, 220, 226], [789, 397, 403], [866, 872, 242], [399, 404, 921], [277, 794, 283], [205, 211, 799], [259, 842, 848], [513, 1006, 507], [59, 1236, 1242], [1239, 347, 1245], [410, 416, 
885], [910, 494, 500], [535, 857, 863]]
embedding_to_use = {} big_h_new = {} big_j_new = {}
num_var = len(embeddings[0]) number_of_embeddings_to_use = 2 # len(embeddings) for k in range(number_of_embeddings_to_use): for j in range(num_var): # up to 0..1037 embedding_to_use[num_var*k+j] = [embeddings[k][j]] big_h_new[num_var*k+j] = 0
for k, v in jay.items(): big_j_new[k] = v for j in range(1, number_of_embeddings_to_use): big_j_new[(k[0]+num_var*j, k[1]+num_var*j)] = v
sampler = DWaveSampler(solver='Advantage2_prototype2.4') base_sampler = FixedEmbeddingComposite(sampler, embedding=embedding_to_use)
sampler_kwargs = dict(h=big_h_new, J=big_j_new, num_reads=num_reads, answer_mode='raw', fast_anneal=True, annealing_time=5 / 1000) ss = base_sampler.sample_ising(**sampler_kwargs) def ss_to_samps(ss): all_samples = ss.record.sample[:, range(num_var)] for k in range(1, number_of_embeddings_to_use): all_samples = np.vstack((all_samples, ss.record.sample[:, range(num_var*k, num_var*(k+1))])) samps = np.array(all_samples, dtype=float) # casting may not be necessary. return samps
samps = ss_to_samps(ss) correlation_matrix = np.einsum('si,sj->ij', samps, samps) / num_reads / number_of_embeddings_to_use # this is a full matrix with ones on the diagonal print(correlation_matrix)
Next, this version does not work. If you look at the columns of the ss.record.sample returned, they don't maintain the correct patterns and the off-diagonal parts of the correlation matrix go to zero.
It looks like something gets messed up with the column / variable labels maybe? Unless I'm just doing something totally stupid? (possible?)
it appears as though using the SpinReversalTransformComposite doesn't work properly here
composed_sampler = SpinReversalTransformComposite(base_sampler) num_spin_reversal_transforms = 5 # Also fails for 1 num_reads = int(num_reads/num_spin_reversal_transforms) sampler_kwargs.update({'num_reads': num_reads, 'num_spin_reversal_transforms': num_spin_reversal_transforms}) ss = composed_sampler.sample_ising(**sampler_kwargs) samps = ss_to_samps(ss) correlation_matrix = np.einsum('si,sj->ij', samps, samps) / num_reads / num_spin_reversal_transforms / number_of_embeddings_to_use # this is a full matrix with ones on the diagonal print(correlation_matrix) print('h', big_h_new) print('J', big_j_new) print('embedding', embedding_to_use)
Expected behavior print('The correlation matrix should be:\n' '[[1, -1+eps, 1-eps]\n [-1+eps, 1, -1+eps]\n [1-eps, -1+eps, 1]]\n' 'for small eps.\n' 'This does not happen when using SpinReversalComposite')
Environment:
Additional context Add any other context about the problem here.