.include "macro.inc"

# assembler directives
.set noat      # allow manual use of $at
.set noreorder # don't insert nops after branches (delay slots are written explicitly)
.set gp=64     # allow use of 64-bit general purpose registers

.section .text

.align 4

#-----------------------------------------------------------------------
# int bcmp(const void *s1, const void *s2, size_t n)
#
# Compares n bytes at s1 and s2.
# In:    $a0 = s1, $a1 = s2, $a2 = n (byte count)
# Out:   $v0 = 0 if the two regions are equal, 1 if they differ
#        (only equal/unequal — no ordering, unlike memcmp)
# Note:  .set noreorder is active: the instruction after every branch is
#        its delay slot and executes regardless of the branch outcome,
#        except after branch-likely (bnel), where the slot runs only if
#        the branch is taken.
#-----------------------------------------------------------------------
glabel bcmp
/* 0074C0 800068C0 28C10010 */  slti       $at, $a2, 0x10
/* 0074C4 800068C4 14200037 */  bnez       $at, .bytecmp               # n < 16: not worth word-sized compares
/* 0074C8 800068C8 00851026 */   xor       $v0, $a0, $a1              # (delay slot) low bits differ iff relative alignment differs
/* 0074CC 800068CC 30420003 */  andi       $v0, $v0, 3
/* 0074D0 800068D0 14400019 */  bnez       $v0, .unalgncmp             # s1 and s2 have different alignment mod 4
/* 0074D4 800068D4 0004C023 */   negu      $t8, $a0                   # (delay slot)
/* 0074D8 800068D8 33180003 */  andi       $t8, $t8, 3                 # $t8 = bytes needed to word-align both pointers
/* 0074DC 800068DC 13000007 */  beqz       $t8, .wordcmp               # already aligned: skip the head compare
/* 0074E0 800068E0 00D83023 */   subu      $a2, $a2, $t8              # (delay slot) consume the head bytes from n
# Head compare of the 1-3 unaligned leading bytes, done with a single
# partial-word load per side.  lwl fills only the "left" byte lanes of
# the destination; copying $v1 into $v0 first makes the untouched lanes
# identical in both registers, so the bne below depends only on the
# bytes actually loaded.  (Both lwl's fill the same lanes because the
# two pointers share the same alignment here.)
/* 0074E4 800068E4 00601025 */  move       $v0, $v1
/* 0074E8 800068E8 88820000 */  lwl        $v0, ($a0)
/* 0074EC 800068EC 88A30000 */  lwl        $v1, ($a1)
/* 0074F0 800068F0 00982021 */  addu       $a0, $a0, $t8               # advance both pointers past the head
/* 0074F4 800068F4 00B82821 */  addu       $a1, $a1, $t8
/* 0074F8 800068F8 14430036 */  bne        $v0, $v1, .cmpdone          # head bytes differ -> return 1
# (the li below is this bne's delay slot and is harmless either way)
.wordcmp:
# Both pointers are word-aligned here.  Compare $a3 = n & ~3 bytes,
# one aligned word per iteration.
/* 0074FC 800068FC 2401FFFC */  li         $at, -4
/* 007500 80006900 00C13824 */  and        $a3, $a2, $at               # $a3 = n rounded down to a multiple of 4
/* 007504 80006904 10E00027 */  beqz       $a3, .bytecmp               # fewer than 4 bytes left
/* 007508 80006908 00C73023 */   subu      $a2, $a2, $a3              # (delay slot) n -= word-compared byte count
/* 00750C 8000690C 00E43821 */  addu       $a3, $a3, $a0               # $a3 = end pointer for s1
/* 007510 80006910 8C820000 */  lw         $v0, ($a0)
.L80006914:
/* 007514 80006914 8CA30000 */  lw         $v1, ($a1)
/* 007518 80006918 24840004 */  addiu      $a0, $a0, 4
/* 00751C 8000691C 24A50004 */  addiu      $a1, $a1, 4
/* 007520 80006920 1443002C */  bne        $v0, $v1, .cmpdone          # words differ -> return 1
/* 007524 80006924 00000000 */   nop
/* 007528 80006928 5487FFFA */  bnel       $a0, $a3, .L80006914        # branch-likely: slot runs only when looping
/* 00752C 8000692C 8C820000 */   lw        $v0, ($a0)                 # (delay slot) preload next s1 word
/* 007530 80006930 1000001C */  b          .bytecmp                    # compare the 0-3 trailing bytes
/* 007534 80006934 00000000 */   nop
.unalgncmp:
# Pointers have different alignment mod 4.  First advance both by the
# 0-3 bytes that word-align s2 ($a1), comparing those bytes one at a time.
/* 007538 80006938 00053823 */  negu       $a3, $a1
/* 00753C 8000693C 30E70003 */  andi       $a3, $a3, 3                 # $a3 = bytes until $a1 is word-aligned
/* 007540 80006940 10E0000A */  beqz       $a3, .partaligncmp          # s2 already aligned
/* 007544 80006944 00C73023 */   subu      $a2, $a2, $a3              # (delay slot) consume those bytes from n
/* 007548 80006948 00E43821 */  addu       $a3, $a3, $a0               # end pointer (s1-relative; both advance in lockstep)
/* 00754C 8000694C 90820000 */  lbu        $v0, ($a0)
.L80006950:
/* 007550 80006950 90A30000 */  lbu        $v1, ($a1)
/* 007554 80006954 24840001 */  addiu      $a0, $a0, 1
/* 007558 80006958 24A50001 */  addiu      $a1, $a1, 1
/* 00755C 8000695C 1443001D */  bne        $v0, $v1, .cmpdone          # bytes differ -> return 1
/* 007560 80006960 00000000 */   nop
/* 007564 80006964 5487FFFA */  bnel       $a0, $a3, .L80006950        # branch-likely loop
/* 007568 80006968 90820000 */   lbu       $v0, ($a0)                 # (delay slot) preload next s1 byte
.partaligncmp:
# s2 is now word-aligned, s1 is not.  Compare n & ~3 bytes per pass:
# aligned lw from s2 vs. an unaligned s1 word assembled with lwl+lwr.
/* 00756C 8000696C 2401FFFC */  li         $at, -4
/* 007570 80006970 00C13824 */  and        $a3, $a2, $at               # $a3 = n rounded down to a multiple of 4
/* 007574 80006974 10E0000B */  beqz       $a3, .bytecmp               # fewer than 4 bytes left
/* 007578 80006978 00C73023 */   subu      $a2, $a2, $a3              # (delay slot) n -= word-compared byte count
/* 00757C 8000697C 00E43821 */  addu       $a3, $a3, $a0               # $a3 = end pointer for s1
/* 007580 80006980 88820000 */  lwl        $v0, ($a0)                  # left part of unaligned s1 word
.L80006984:
/* 007584 80006984 8CA30000 */  lw         $v1, ($a1)                  # aligned s2 word
/* 007588 80006988 98820003 */  lwr        $v0, 3($a0)                 # right part completes the s1 word
/* 00758C 8000698C 24840004 */  addiu      $a0, $a0, 4
/* 007590 80006990 24A50004 */  addiu      $a1, $a1, 4
/* 007594 80006994 1443000F */  bne        $v0, $v1, .cmpdone          # words differ -> return 1
/* 007598 80006998 00000000 */   nop
/* 00759C 8000699C 5487FFF9 */  bnel       $a0, $a3, .L80006984        # branch-likely loop
/* 0075A0 800069A0 88820000 */   lwl       $v0, ($a0)                 # (delay slot) preload left part of next s1 word
.bytecmp:
# Final byte-at-a-time compare of the remaining $a2 bytes (also the
# whole comparison when n < 16).
/* 0075A4 800069A4 18C00009 */  blez       $a2, .L800069CC             # nothing left -> regions equal
/* 0075A8 800069A8 00C43821 */   addu      $a3, $a2, $a0              # (delay slot) $a3 = end pointer for s1
/* 0075AC 800069AC 90820000 */  lbu        $v0, ($a0)
.L800069B0:
/* 0075B0 800069B0 90A30000 */  lbu        $v1, ($a1)
/* 0075B4 800069B4 24840001 */  addiu      $a0, $a0, 1
/* 0075B8 800069B8 24A50001 */  addiu      $a1, $a1, 1
/* 0075BC 800069BC 14430005 */  bne        $v0, $v1, .cmpdone          # bytes differ -> return 1
/* 0075C0 800069C0 00000000 */   nop
/* 0075C4 800069C4 5487FFFA */  bnel       $a0, $a3, .L800069B0        # branch-likely loop
/* 0075C8 800069C8 90820000 */   lbu       $v0, ($a0)                 # (delay slot) preload next s1 byte
.L800069CC:
/* 0075CC 800069CC 03E00008 */  jr         $ra
/* 0075D0 800069D0 00001025 */   move      $v0, $zero                 # (delay slot) return 0: equal
.cmpdone:
/* 0075D4 800069D4 03E00008 */  jr         $ra
/* 0075D8 800069D8 24020001 */   li        $v0, 1                     # (delay slot) return 1: differ