From: Vasudev Kamath Date: Thu, 26 Oct 2017 16:29:03 +0000 (+0530) Subject: New upstream version 0.22.0 X-Git-Tag: archive/raspbian/0.35.0-2+rpi1~3^2^2^2^2^2^2^2~22^2~7 X-Git-Url: https://dgit.raspbian.org/?a=commitdiff_plain;h=82fc9e211abe53c4a08685b01400f5eae6399947;p=cargo.git New upstream version 0.22.0 --- 82fc9e211abe53c4a08685b01400f5eae6399947 diff --cc vendor/advapi32-sys-0.2.0/.cargo-checksum.json index 000000000,000000000..22bddd467 new file mode 100644 --- /dev/null +++ b/vendor/advapi32-sys-0.2.0/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"e06588080cb19d0acb6739808aafa5f26bfb2ca015b2b6370028b44cf7cb8a9a"} diff --cc vendor/advapi32-sys-0.2.0/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/advapi32-sys-0.2.0/.cargo-ok diff --cc vendor/advapi32-sys-0.2.0/Cargo.toml index 000000000,000000000..fccf0ad80 new file mode 100644 --- /dev/null +++ b/vendor/advapi32-sys-0.2.0/Cargo.toml @@@ -1,0 -1,0 +1,17 @@@ ++[package] ++name = "advapi32-sys" ++version = "0.2.0" ++authors = ["Peter Atashian "] ++description = "Contains function definitions for the Windows API library advapi32. See winapi for types and constants." ++documentation = "https://retep998.github.io/doc/advapi32/" ++repository = "https://github.com/retep998/winapi-rs" ++readme = "README.md" ++keywords = ["windows", "ffi", "win32"] ++license = "MIT" ++build = "build.rs" ++[lib] ++name = "advapi32" ++[dependencies] ++winapi = { version = "0.2.5", path = "../.." } ++[build-dependencies] ++winapi-build = { version = "0.1.1", path = "../../build" } diff --cc vendor/advapi32-sys-0.2.0/README.md index 000000000,000000000..c3d84c328 new file mode 100644 --- /dev/null +++ b/vendor/advapi32-sys-0.2.0/README.md @@@ -1,0 -1,0 +1,13 @@@ ++# advapi32 # ++Contains function definitions for the Windows API library advapi32. See winapi for types and constants. 
++ ++```toml ++[dependencies] ++advapi32-sys = "0.1.2" ++``` ++ ++```rust ++extern crate advapi32; ++``` ++ ++[Documentation](https://retep998.github.io/doc/advapi32/) diff --cc vendor/advapi32-sys-0.2.0/build.rs index 000000000,000000000..639d0b305 new file mode 100644 --- /dev/null +++ b/vendor/advapi32-sys-0.2.0/build.rs @@@ -1,0 -1,0 +1,6 @@@ ++// Copyright © 2015, Peter Atashian ++// Licensed under the MIT License ++extern crate build; ++fn main() { ++ build::link("advapi32", false) ++} diff --cc vendor/advapi32-sys-0.2.0/src/lib.rs index 000000000,000000000..3c4a5221b new file mode 100644 --- /dev/null +++ b/vendor/advapi32-sys-0.2.0/src/lib.rs @@@ -1,0 -1,0 +1,1005 @@@ ++// Copyright © 2015, Peter Atashian ++// Licensed under the MIT License ++//! FFI bindings to advapi32. ++#![cfg(windows)] ++extern crate winapi; ++use winapi::*; ++extern "system" { ++ pub fn AbortSystemShutdownA(lpMachineName: LPCSTR) -> BOOL; ++ pub fn AbortSystemShutdownW(lpMachineName: LPWSTR) -> BOOL; ++ // pub fn AccessCheck(); ++ // pub fn AccessCheckAndAuditAlarmA(); ++ // pub fn AccessCheckAndAuditAlarmW(); ++ // pub fn AccessCheckByType(); ++ // pub fn AccessCheckByTypeAndAuditAlarmA(); ++ // pub fn AccessCheckByTypeAndAuditAlarmW(); ++ // pub fn AccessCheckByTypeResultList(); ++ // pub fn AccessCheckByTypeResultListAndAuditAlarmA(); ++ // pub fn AccessCheckByTypeResultListAndAuditAlarmByHandleA(); ++ // pub fn AccessCheckByTypeResultListAndAuditAlarmByHandleW(); ++ // pub fn AccessCheckByTypeResultListAndAuditAlarmW(); ++ // pub fn AddAccessAllowedAce(); ++ // pub fn AddAccessAllowedAceEx(); ++ // pub fn AddAccessAllowedObjectAce(); ++ // pub fn AddAccessDeniedAce(); ++ // pub fn AddAccessDeniedAceEx(); ++ // pub fn AddAccessDeniedObjectAce(); ++ // pub fn AddAce(); ++ // pub fn AddAuditAccessAce(); ++ // pub fn AddAuditAccessAceEx(); ++ // pub fn AddAuditAccessObjectAce(); ++ // pub fn AddConditionalAce(); ++ // pub fn AddMandatoryAce(); ++ // pub fn AddUsersToEncryptedFile(); 
++ // pub fn AddUsersToEncryptedFileEx(); ++ // pub fn AdjustTokenGroups(); ++ pub fn AdjustTokenPrivileges( ++ TokenHandle: HANDLE, DisableAllPrivileges: BOOL, NewState: PTOKEN_PRIVILEGES, ++ BufferLength: DWORD, PreviousState: PTOKEN_PRIVILEGES, ReturnLength: PDWORD, ++ ) -> BOOL; ++ // pub fn AllocateAndInitializeSid(); ++ pub fn AllocateLocallyUniqueId(Luid: PLUID) -> BOOL; ++ pub fn AreAllAccessesGranted(GrantedAccess: DWORD, DesiredAccess: DWORD) -> BOOL; ++ pub fn AreAnyAccessesGranted(GrantedAccess: DWORD, DesiredAccess: DWORD) -> BOOL; ++ // pub fn AuditComputeEffectivePolicyBySid(); ++ // pub fn AuditComputeEffectivePolicyByToken(); ++ // pub fn AuditEnumerateCategories(); ++ // pub fn AuditEnumeratePerUserPolicy(); ++ // pub fn AuditEnumerateSubCategories(); ++ pub fn AuditFree(Buffer: PVOID); ++ // pub fn AuditLookupCategoryGuidFromCategoryId(); ++ // pub fn AuditLookupCategoryIdFromCategoryGuid(); ++ // pub fn AuditLookupCategoryNameA(); ++ // pub fn AuditLookupCategoryNameW(); ++ // pub fn AuditLookupSubCategoryNameA(); ++ // pub fn AuditLookupSubCategoryNameW(); ++ // pub fn AuditQueryGlobalSaclA(); ++ // pub fn AuditQueryGlobalSaclW(); ++ // pub fn AuditQueryPerUserPolicy(); ++ // pub fn AuditQuerySecurity(); ++ // pub fn AuditQuerySystemPolicy(); ++ // pub fn AuditSetGlobalSaclA(); ++ // pub fn AuditSetGlobalSaclW(); ++ // pub fn AuditSetPerUserPolicy(); ++ // pub fn AuditSetSecurity(); ++ // pub fn AuditSetSystemPolicy(); ++ // pub fn BackupEventLogA(); ++ // pub fn BackupEventLogW(); ++ // pub fn BaseRegCloseKey(); ++ // pub fn BaseRegCreateKey(); ++ // pub fn BaseRegDeleteKeyEx(); ++ // pub fn BaseRegDeleteValue(); ++ // pub fn BaseRegFlushKey(); ++ // pub fn BaseRegGetVersion(); ++ // pub fn BaseRegLoadKey(); ++ // pub fn BaseRegOpenKey(); ++ // pub fn BaseRegRestoreKey(); ++ // pub fn BaseRegSaveKeyEx(); ++ // pub fn BaseRegSetKeySecurity(); ++ // pub fn BaseRegSetValue(); ++ // pub fn BaseRegUnLoadKey(); ++ // pub fn 
BuildExplicitAccessWithNameA(); ++ // pub fn BuildExplicitAccessWithNameW(); ++ // pub fn BuildImpersonateExplicitAccessWithNameA(); ++ // pub fn BuildImpersonateExplicitAccessWithNameW(); ++ // pub fn BuildImpersonateTrusteeA(); ++ // pub fn BuildImpersonateTrusteeW(); ++ // pub fn BuildSecurityDescriptorA(); ++ // pub fn BuildSecurityDescriptorW(); ++ // pub fn BuildTrusteeWithNameA(); ++ // pub fn BuildTrusteeWithNameW(); ++ // pub fn BuildTrusteeWithObjectsAndNameA(); ++ // pub fn BuildTrusteeWithObjectsAndNameW(); ++ // pub fn BuildTrusteeWithObjectsAndSidA(); ++ // pub fn BuildTrusteeWithObjectsAndSidW(); ++ // pub fn BuildTrusteeWithSidA(); ++ // pub fn BuildTrusteeWithSidW(); ++ // pub fn CancelOverlappedAccess(); ++ // pub fn ChangeServiceConfig2A(); ++ // pub fn ChangeServiceConfig2W(); ++ // pub fn ChangeServiceConfigA(); ++ // pub fn ChangeServiceConfigW(); ++ // pub fn CheckForHiberboot(); ++ // pub fn CheckTokenMembership(); ++ // pub fn ClearEventLogA(); ++ // pub fn ClearEventLogW(); ++ // pub fn CloseCodeAuthzLevel(); ++ // pub fn CloseEncryptedFileRaw(); ++ // pub fn CloseEventLog(); ++ pub fn CloseServiceHandle(hSCObject: SC_HANDLE) -> BOOL; ++ // pub fn CloseThreadWaitChainSession(); ++ // pub fn CloseTrace(); ++ // pub fn CommandLineFromMsiDescriptor(); ++ // pub fn ComputeAccessTokenFromCodeAuthzLevel(); ++ pub fn ControlService( ++ hService: SC_HANDLE, dwControl: DWORD, lpServiceStatus: LPSERVICE_STATUS, ++ ) -> BOOL; ++ // pub fn ControlServiceExA(); ++ // pub fn ControlServiceExW(); ++ // pub fn ControlTraceA(); ++ // pub fn ControlTraceW(); ++ // pub fn ConvertAccessToSecurityDescriptorA(); ++ // pub fn ConvertAccessToSecurityDescriptorW(); ++ // pub fn ConvertSDToStringSDDomainW(); ++ // pub fn ConvertSDToStringSDRootDomainA(); ++ // pub fn ConvertSDToStringSDRootDomainW(); ++ // pub fn ConvertSecurityDescriptorToAccessA(); ++ // pub fn ConvertSecurityDescriptorToAccessNamedA(); ++ // pub fn ConvertSecurityDescriptorToAccessNamedW(); ++ 
// pub fn ConvertSecurityDescriptorToAccessW(); ++ // pub fn ConvertSecurityDescriptorToStringSecurityDescriptorA(); ++ // pub fn ConvertSecurityDescriptorToStringSecurityDescriptorW(); ++ // pub fn ConvertSidToStringSidA(); ++ // pub fn ConvertSidToStringSidW(); ++ // pub fn ConvertStringSDToSDDomainA(); ++ // pub fn ConvertStringSDToSDDomainW(); ++ // pub fn ConvertStringSDToSDRootDomainA(); ++ // pub fn ConvertStringSDToSDRootDomainW(); ++ // pub fn ConvertStringSecurityDescriptorToSecurityDescriptorA(); ++ // pub fn ConvertStringSecurityDescriptorToSecurityDescriptorW(); ++ // pub fn ConvertStringSidToSidA(); ++ // pub fn ConvertStringSidToSidW(); ++ // pub fn ConvertToAutoInheritPrivateObjectSecurity(); ++ // pub fn CopySid(); ++ // pub fn CreateCodeAuthzLevel(); ++ // pub fn CreatePrivateObjectSecurity(); ++ // pub fn CreatePrivateObjectSecurityEx(); ++ // pub fn CreatePrivateObjectSecurityWithMultipleInheritance(); ++ // pub fn CreateProcessAsUserA(); ++ // pub fn CreateProcessAsUserW(); ++ // pub fn CreateProcessWithLogonW(); ++ // pub fn CreateProcessWithTokenW(); ++ // pub fn CreateRestrictedToken(); ++ pub fn CreateServiceA( ++ hSCManager: SC_HANDLE, lpServiceName: LPCSTR, lpDisplayName: LPCSTR, ++ dwDesiredAccess: DWORD, dwServiceType: DWORD, dwStartType: DWORD, dwErrorControl: DWORD, ++ lpBinaryPathName: LPCSTR, lpLoadOrderGroup: LPCSTR, lpdwTagId: LPDWORD, ++ lpDependencies: LPCSTR, lpServiceStartName: LPCSTR, lpPassword: LPCSTR, ++ ) -> SC_HANDLE; ++ pub fn CreateServiceW( ++ hSCManager: SC_HANDLE, lpServiceName: LPCWSTR, lpDisplayName: LPCWSTR, ++ dwDesiredAccess: DWORD, dwServiceType: DWORD, dwStartType: DWORD, dwErrorControl: DWORD, ++ lpBinaryPathName: LPCWSTR, lpLoadOrderGroup: LPCWSTR, lpdwTagId: LPDWORD, ++ lpDependencies: LPCWSTR, lpServiceStartName: LPCWSTR, lpPassword: LPCWSTR, ++ ) -> SC_HANDLE; ++ // pub fn CreateTraceInstanceId(); ++ // pub fn CreateWellKnownSid(); ++ pub fn CredDeleteA(TargetName: LPCSTR, Type: DWORD, Flags: DWORD) -> 
BOOL; ++ pub fn CredDeleteW(TargetName: LPCWSTR, Type: DWORD, Flags: DWORD) -> BOOL; ++ // pub fn CredEnumerateA(); ++ // pub fn CredEnumerateW(); ++ // pub fn CredFindBestCredentialA(); ++ // pub fn CredFindBestCredentialW(); ++ pub fn CredFree(Buffer: PVOID); ++ // pub fn CredGetSessionTypes(); ++ // pub fn CredGetTargetInfoA(); ++ // pub fn CredGetTargetInfoW(); ++ // pub fn CredIsMarshaledCredentialA(); ++ // pub fn CredIsMarshaledCredentialW(); ++ // pub fn CredIsProtectedA(); ++ // pub fn CredIsProtectedW(); ++ // pub fn CredMarshalCredentialA(); ++ // pub fn CredMarshalCredentialW(); ++ // pub fn CredProtectA(); ++ // pub fn CredProtectW(); ++ pub fn CredReadA( ++ TargetName: LPCSTR, Type: DWORD, Flags: DWORD, Credential: *mut PCREDENTIALA, ++ ) -> BOOL; ++ // pub fn CredReadDomainCredentialsA(); ++ // pub fn CredReadDomainCredentialsW(); ++ pub fn CredReadW( ++ TargetName: LPCWSTR, Type: DWORD, Flags: DWORD, Credential: *mut PCREDENTIALW, ++ ) -> BOOL; ++ // pub fn CredRenameA(); ++ // pub fn CredRenameW(); ++ // pub fn CredUnmarshalCredentialA(); ++ // pub fn CredUnmarshalCredentialW(); ++ // pub fn CredUnprotectA(); ++ // pub fn CredUnprotectW(); ++ pub fn CredWriteA(Credential: PCREDENTIALA, Flags: DWORD) -> BOOL; ++ // pub fn CredWriteDomainCredentialsA(); ++ // pub fn CredWriteDomainCredentialsW(); ++ pub fn CredWriteW(Credential: PCREDENTIALW, Flags: DWORD) -> BOOL; ++ pub fn CryptAcquireContextA( ++ phProv: *mut HCRYPTPROV, szContainer: LPCSTR, szProvider: LPCSTR, dwProvType: DWORD, ++ dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptAcquireContextW( ++ phProv: *mut HCRYPTPROV, szContainer: LPCWSTR, szProvider: LPCWSTR, dwProvType: DWORD, ++ dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptContextAddRef(hProv: HCRYPTPROV, pdwReserved: *mut DWORD, dwFlags: DWORD) -> BOOL; ++ pub fn CryptCreateHash( ++ hProv: HCRYPTPROV, Algid: ALG_ID, hKey: HCRYPTKEY, dwFlags: DWORD, phHash: *mut HCRYPTHASH, ++ ) -> BOOL; ++ pub fn CryptDecrypt( ++ hKey: HCRYPTKEY, hHash: 
HCRYPTHASH, Final: BOOL, dwFlags: DWORD, pbData: *mut BYTE, ++ pdwDataLen: *mut DWORD, ++ ) -> BOOL; ++ pub fn CryptDeriveKey( ++ hProv: HCRYPTPROV, Algid: ALG_ID, hBaseData: HCRYPTHASH, dwFlags: DWORD, ++ phKey: *mut HCRYPTKEY, ++ ) -> BOOL; ++ pub fn CryptDestroyHash(hHash: HCRYPTHASH) -> BOOL; ++ pub fn CryptDestroyKey(hKey: HCRYPTKEY) -> BOOL; ++ pub fn CryptDuplicateHash( ++ hHash: HCRYPTHASH, pdwReserved: *mut DWORD, dwFlags: DWORD, phHash: *mut HCRYPTHASH, ++ ) -> BOOL; ++ pub fn CryptDuplicateKey( ++ hKey: HCRYPTKEY, pdwReserved: *mut DWORD, dwFlags: DWORD, phKey: *mut HCRYPTKEY, ++ ) -> BOOL; ++ pub fn CryptEncrypt( ++ hKey: HCRYPTKEY, hHash: HCRYPTHASH, Final: BOOL, dwFlags: DWORD, pbData: *mut BYTE, ++ pdwDataLen: *mut DWORD, dwBufLen: DWORD, ++ ) -> BOOL; ++ pub fn CryptEnumProviderTypesA( ++ dwIndex: DWORD, pdwReserved: *mut DWORD, dwFlags: DWORD, pdwProvType: *mut DWORD, ++ szTypeName: LPSTR, pcbTypeName: *mut DWORD, ++ ) -> BOOL; ++ pub fn CryptEnumProviderTypesW( ++ dwIndex: DWORD, pdwReserved: *mut DWORD, dwFlags: DWORD, pdwProvType: *mut DWORD, ++ szTypeName: LPWSTR, pcbTypeName: *mut DWORD, ++ ) -> BOOL; ++ pub fn CryptEnumProvidersA( ++ dwIndex: DWORD, pdwReserved: *mut DWORD, dwFlags: DWORD, pdwProvType: *mut DWORD, ++ szProvName: LPSTR, pcbProvName: *mut DWORD, ++ ) -> BOOL; ++ pub fn CryptEnumProvidersW( ++ dwIndex: DWORD, pdwReserved: *mut DWORD, dwFlags: DWORD, pdwProvType: *mut DWORD, ++ szProvName: LPWSTR, pcbProvName: *mut DWORD, ++ ) -> BOOL; ++ pub fn CryptExportKey( ++ hKey: HCRYPTKEY, hExpKey: HCRYPTKEY, dwBlobType: DWORD, dwFlags: DWORD, pbData: *mut BYTE, ++ pdwDataLen: *mut DWORD, ++ ) -> BOOL; ++ pub fn CryptGenKey( ++ hProv: HCRYPTPROV, Algid: ALG_ID, dwFlags: DWORD, phKey: *mut HCRYPTKEY, ++ ) -> BOOL; ++ pub fn CryptGenRandom(hProv: HCRYPTPROV, dwLen: DWORD, pbBuffer: *mut BYTE) -> BOOL; ++ pub fn CryptGetDefaultProviderA( ++ dwProvType: DWORD, pdwReserved: *mut DWORD, dwFlags: DWORD, pszProvName: LPSTR, ++ pcbProvName: *mut 
DWORD, ++ ) -> BOOL; ++ pub fn CryptGetDefaultProviderW( ++ dwProvType: DWORD, pdwReserved: *mut DWORD, dwFlags: DWORD, pszProvName: LPWSTR, ++ pcbProvName: *mut DWORD, ++ ) -> BOOL; ++ pub fn CryptGetHashParam( ++ hHash: HCRYPTHASH, dwParam: DWORD, pbData: *mut BYTE, pdwDataLen: *mut DWORD, ++ dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptGetKeyParam( ++ hKey: HCRYPTKEY, dwParam: DWORD, pbData: *mut BYTE, pdwDataLen: *mut DWORD, dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptGetProvParam( ++ hProv: HCRYPTPROV, dwParam: DWORD, pbData: *mut BYTE, pdwDataLen: *mut DWORD, ++ dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptGetUserKey(hProv: HCRYPTPROV, dwKeySpec: DWORD, phUserKey: *mut HCRYPTKEY) -> BOOL; ++ pub fn CryptHashData( ++ hHash: HCRYPTHASH, pbData: *const BYTE, dwDataLen: DWORD, dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptHashSessionKey(hHash: HCRYPTHASH, hKey: HCRYPTKEY, dwFlags: DWORD) -> BOOL; ++ pub fn CryptImportKey( ++ hProv: HCRYPTPROV, pbData: *const BYTE, dwDataLen: DWORD, hPubKey: HCRYPTKEY, ++ dwFlags: DWORD, phKey: *mut HCRYPTKEY, ++ ) -> BOOL; ++ pub fn CryptReleaseContext(hProv: HCRYPTPROV, dwFlags: DWORD) -> BOOL; ++ pub fn CryptSetHashParam( ++ hHash: HCRYPTHASH, dwParam: DWORD, pbData: *const BYTE, dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptSetKeyParam( ++ hKey: HCRYPTKEY, dwParam: DWORD, pbData: *const BYTE, dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptSetProvParam( ++ hProv: HCRYPTPROV, dwParam: DWORD, pbData: *const BYTE, dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptSetProviderA(pszProvName: LPCSTR, dwProvType: DWORD) -> BOOL; ++ pub fn CryptSetProviderExA( ++ pszProvName: LPCSTR, dwProvType: DWORD, pdwReserved: *mut DWORD, dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptSetProviderExW( ++ pszProvName: LPCWSTR, dwProvType: DWORD, pdwReserved: *mut DWORD, dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptSetProviderW(pszProvName: LPCWSTR, dwProvType: DWORD) -> BOOL; ++ pub fn CryptSignHashA( ++ hHash: HCRYPTHASH, dwKeySpec: DWORD, szDescription: 
LPCSTR, dwFlags: DWORD, ++ pbSignature: *mut BYTE, pdwSigLen: *mut DWORD, ++ ) -> BOOL; ++ pub fn CryptSignHashW( ++ hHash: HCRYPTHASH, dwKeySpec: DWORD, szDescription: LPCWSTR, dwFlags: DWORD, ++ pbSignature: *mut BYTE, pdwSigLen: *mut DWORD, ++ ) -> BOOL; ++ pub fn CryptVerifySignatureA( ++ hHash: HCRYPTHASH, pbSignature: *const BYTE, dwSigLen: DWORD, hPubKey: HCRYPTKEY, ++ szDescription: LPCSTR, dwFlags: DWORD, ++ ) -> BOOL; ++ pub fn CryptVerifySignatureW( ++ hHash: HCRYPTHASH, pbSignature: *const BYTE, dwSigLen: DWORD, hPubKey: HCRYPTKEY, ++ szDescription: LPCWSTR, dwFlags: DWORD, ++ ) -> BOOL; ++ // pub fn DecryptFileA(); ++ // pub fn DecryptFileW(); ++ // pub fn DeleteAce(); ++ pub fn DeleteService(hService: SC_HANDLE) -> BOOL; ++ // pub fn DeregisterEventSource(); ++ // pub fn DestroyPrivateObjectSecurity(); ++ // pub fn DuplicateEncryptionInfoFile(); ++ // pub fn DuplicateToken(); ++ // pub fn DuplicateTokenEx(); ++ // pub fn ElfBackupEventLogFileA(); ++ // pub fn ElfBackupEventLogFileW(); ++ // pub fn ElfChangeNotify(); ++ // pub fn ElfClearEventLogFileA(); ++ // pub fn ElfClearEventLogFileW(); ++ // pub fn ElfCloseEventLog(); ++ // pub fn ElfDeregisterEventSource(); ++ // pub fn ElfFlushEventLog(); ++ // pub fn ElfNumberOfRecords(); ++ // pub fn ElfOldestRecord(); ++ // pub fn ElfOpenBackupEventLogA(); ++ // pub fn ElfOpenBackupEventLogW(); ++ // pub fn ElfOpenEventLogA(); ++ // pub fn ElfOpenEventLogW(); ++ // pub fn ElfReadEventLogA(); ++ // pub fn ElfReadEventLogW(); ++ // pub fn ElfRegisterEventSourceA(); ++ // pub fn ElfRegisterEventSourceW(); ++ // pub fn ElfReportEventA(); ++ // pub fn ElfReportEventAndSourceW(); ++ // pub fn ElfReportEventW(); ++ // pub fn EnableTrace(); ++ // pub fn EnableTraceEx(); ++ // pub fn EnableTraceEx2(); ++ // pub fn EncryptFileA(); ++ // pub fn EncryptFileW(); ++ // pub fn EncryptedFileKeyInfo(); ++ // pub fn EncryptionDisable(); ++ // pub fn EnumDependentServicesA(); ++ // pub fn EnumDependentServicesW(); ++ // pub fn 
EnumDynamicTimeZoneInformation(); ++ // pub fn EnumServiceGroupW(); ++ // pub fn EnumServicesStatusA(); ++ // pub fn EnumServicesStatusExA(); ++ // pub fn EnumServicesStatusExW(); ++ // pub fn EnumServicesStatusW(); ++ // pub fn EnumerateTraceGuids(); ++ // pub fn EnumerateTraceGuidsEx(); ++ // pub fn EqualDomainSid(); ++ // pub fn EqualPrefixSid(); ++ // pub fn EqualSid(); ++ // pub fn EtwLogSysConfigExtension(); ++ // pub fn EventAccessControl(); ++ // pub fn EventAccessQuery(); ++ // pub fn EventAccessRemove(); ++ // pub fn EventActivityIdControl(); ++ // pub fn EventEnabled(); ++ // pub fn EventProviderEnabled(); ++ // pub fn EventRegister(); ++ // pub fn EventSetInformation(); ++ // pub fn EventUnregister(); ++ // pub fn EventWrite(); ++ // pub fn EventWriteEndScenario(); ++ // pub fn EventWriteEx(); ++ // pub fn EventWriteStartScenario(); ++ // pub fn EventWriteString(); ++ // pub fn EventWriteTransfer(); ++ // pub fn FileEncryptionStatusA(); ++ // pub fn FileEncryptionStatusW(); ++ // pub fn FindFirstFreeAce(); ++ // pub fn FlushEfsCache(); ++ // pub fn FlushTraceA(); ++ // pub fn FlushTraceW(); ++ // pub fn FreeEncryptedFileKeyInfo(); ++ // pub fn FreeEncryptedFileMetadata(); ++ // pub fn FreeEncryptionCertificateHashList(); ++ // pub fn FreeInheritedFromArray(); ++ // pub fn FreeSid(); ++ // pub fn GetAccessPermissionsForObjectA(); ++ // pub fn GetAccessPermissionsForObjectW(); ++ // pub fn GetAce(); ++ // pub fn GetAclInformation(); ++ // pub fn GetAuditedPermissionsFromAclA(); ++ // pub fn GetAuditedPermissionsFromAclW(); ++ pub fn GetCurrentHwProfileA(lpHwProfileInfo: LPHW_PROFILE_INFOA) -> BOOL; ++ pub fn GetCurrentHwProfileW(lpHwProfileInfo: LPHW_PROFILE_INFOW) -> BOOL; ++ // pub fn GetDynamicTimeZoneInformationEffectiveYears(); ++ // pub fn GetEffectiveRightsFromAclA(); ++ // pub fn GetEffectiveRightsFromAclW(); ++ // pub fn GetEncryptedFileMetadata(); ++ // pub fn GetEventLogInformation(); ++ // pub fn GetExplicitEntriesFromAclA(); ++ // pub fn 
GetExplicitEntriesFromAclW(); ++ // pub fn GetFileSecurityA(); ++ // pub fn GetFileSecurityW(); ++ // pub fn GetInformationCodeAuthzLevelW(); ++ // pub fn GetInformationCodeAuthzPolicyW(); ++ // pub fn GetInheritanceSourceA(); ++ // pub fn GetInheritanceSourceW(); ++ // pub fn GetKernelObjectSecurity(); ++ // pub fn GetLengthSid(); ++ // pub fn GetLocalManagedApplicationData(); ++ // pub fn GetLocalManagedApplications(); ++ // pub fn GetManagedApplicationCategories(); ++ // pub fn GetManagedApplications(); ++ // pub fn GetMultipleTrusteeA(); ++ // pub fn GetMultipleTrusteeOperationA(); ++ // pub fn GetMultipleTrusteeOperationW(); ++ // pub fn GetMultipleTrusteeW(); ++ // pub fn GetNamedSecurityInfoA(); ++ // pub fn GetNamedSecurityInfoExA(); ++ // pub fn GetNamedSecurityInfoExW(); ++ // pub fn GetNamedSecurityInfoW(); ++ // pub fn GetNumberOfEventLogRecords(); ++ // pub fn GetOldestEventLogRecord(); ++ // pub fn GetOverlappedAccessResults(); ++ // pub fn GetPrivateObjectSecurity(); ++ // pub fn GetSecurityDescriptorControl(); ++ // pub fn GetSecurityDescriptorDacl(); ++ // pub fn GetSecurityDescriptorGroup(); ++ // pub fn GetSecurityDescriptorLength(); ++ // pub fn GetSecurityDescriptorOwner(); ++ // pub fn GetSecurityDescriptorRMControl(); ++ // pub fn GetSecurityDescriptorSacl(); ++ // pub fn GetSecurityInfo(); ++ // pub fn GetSecurityInfoExA(); ++ // pub fn GetSecurityInfoExW(); ++ // pub fn GetServiceDisplayNameA(); ++ // pub fn GetServiceDisplayNameW(); ++ // pub fn GetServiceKeyNameA(); ++ // pub fn GetServiceKeyNameW(); ++ // pub fn GetSidIdentifierAuthority(); ++ // pub fn GetSidLengthRequired(); ++ // pub fn GetSidSubAuthority(); ++ // pub fn GetSidSubAuthorityCount(); ++ // pub fn GetStringConditionFromBinary(); ++ // pub fn GetThreadWaitChain(); ++ // pub fn GetTokenInformation(); ++ // pub fn GetTraceEnableFlags(); ++ // pub fn GetTraceEnableLevel(); ++ // pub fn GetTraceLoggerHandle(); ++ // pub fn GetTrusteeFormA(); ++ // pub fn GetTrusteeFormW(); ++ 
// pub fn GetTrusteeNameA(); ++ // pub fn GetTrusteeNameW(); ++ // pub fn GetTrusteeTypeA(); ++ // pub fn GetTrusteeTypeW(); ++ pub fn GetUserNameA(lpBuffer: LPSTR, pcbBuffer: LPDWORD) -> BOOL; ++ pub fn GetUserNameW(lpBuffer: LPWSTR, pcbBuffer: LPDWORD) -> BOOL; ++ // pub fn GetWindowsAccountDomainSid(); ++ // pub fn I_ScSetServiceBitsA(); ++ // pub fn I_ScSetServiceBitsW(); ++ // pub fn IdentifyCodeAuthzLevelW(); ++ // pub fn ImpersonateAnonymousToken(); ++ // pub fn ImpersonateLoggedOnUser(); ++ // pub fn ImpersonateNamedPipeClient(); ++ // pub fn ImpersonateSelf(); ++ // pub fn InitializeAcl(); ++ // pub fn InitializeSecurityDescriptor(); ++ // pub fn InitializeSid(); ++ // pub fn InitiateShutdownA(); ++ // pub fn InitiateShutdownW(); ++ // pub fn InitiateSystemShutdownA(); ++ // pub fn InitiateSystemShutdownExA(); ++ // pub fn InitiateSystemShutdownExW(); ++ // pub fn InitiateSystemShutdownW(); ++ // pub fn InstallApplication(); ++ // pub fn IsTextUnicode(); ++ // pub fn IsTokenRestricted(); ++ // pub fn IsTokenUntrusted(); ++ // pub fn IsValidAcl(); ++ // pub fn IsValidRelativeSecurityDescriptor(); ++ // pub fn IsValidSecurityDescriptor(); ++ // pub fn IsValidSid(); ++ // pub fn IsWellKnownSid(); ++ // pub fn LockServiceDatabase(); ++ // pub fn LogonUserA(); ++ // pub fn LogonUserExA(); ++ // pub fn LogonUserExExW(); ++ // pub fn LogonUserExW(); ++ // pub fn LogonUserW(); ++ // pub fn LookupAccountNameA(); ++ // pub fn LookupAccountNameW(); ++ // pub fn LookupAccountSidA(); ++ // pub fn LookupAccountSidW(); ++ // pub fn LookupPrivilegeDisplayNameA(); ++ // pub fn LookupPrivilegeDisplayNameW(); ++ pub fn LookupPrivilegeNameA( ++ lpSystemName: LPCSTR, lpLuid: PLUID, lpName: LPSTR, cchName: LPDWORD, ++ ) -> BOOL; ++ pub fn LookupPrivilegeNameW( ++ lpSystemName: LPCWSTR, lpLuid: PLUID, lpName: LPWSTR, cchName: LPDWORD, ++ ) -> BOOL; ++ pub fn LookupPrivilegeValueA( ++ lpSystemName: LPCSTR, lpName: LPCSTR, lpLuid: PLUID, ++ ) -> BOOL; ++ pub fn 
LookupPrivilegeValueW( ++ lpSystemName: LPCWSTR, lpName: LPCWSTR, lpLuid: PLUID, ++ ) -> BOOL; ++ // pub fn LookupSecurityDescriptorPartsA(); ++ // pub fn LookupSecurityDescriptorPartsW(); ++ // pub fn LsaAddAccountRights(); ++ // pub fn LsaAddPrivilegesToAccount(); ++ // pub fn LsaClearAuditLog(); ++ // pub fn LsaClose(); ++ // pub fn LsaCreateAccount(); ++ // pub fn LsaCreateSecret(); ++ // pub fn LsaCreateTrustedDomain(); ++ // pub fn LsaCreateTrustedDomainEx(); ++ // pub fn LsaDelete(); ++ // pub fn LsaDeleteTrustedDomain(); ++ // pub fn LsaEnumerateAccountRights(); ++ // pub fn LsaEnumerateAccounts(); ++ // pub fn LsaEnumerateAccountsWithUserRight(); ++ // pub fn LsaEnumeratePrivileges(); ++ // pub fn LsaEnumeratePrivilegesOfAccount(); ++ // pub fn LsaEnumerateTrustedDomains(); ++ // pub fn LsaEnumerateTrustedDomainsEx(); ++ // pub fn LsaFreeMemory(); ++ // pub fn LsaGetAppliedCAPIDs(); ++ // pub fn LsaGetQuotasForAccount(); ++ // pub fn LsaGetRemoteUserName(); ++ // pub fn LsaGetSystemAccessAccount(); ++ // pub fn LsaGetUserName(); ++ // pub fn LsaICLookupNames(); ++ // pub fn LsaICLookupNamesWithCreds(); ++ // pub fn LsaICLookupSids(); ++ // pub fn LsaICLookupSidsWithCreds(); ++ // pub fn LsaLookupNames(); ++ // pub fn LsaLookupNames2(); ++ // pub fn LsaLookupPrivilegeDisplayName(); ++ // pub fn LsaLookupPrivilegeName(); ++ // pub fn LsaLookupPrivilegeValue(); ++ // pub fn LsaLookupSids(); ++ // pub fn LsaLookupSids2(); ++ // pub fn LsaManageSidNameMapping(); ++ // pub fn LsaNtStatusToWinError(); ++ // pub fn LsaOpenAccount(); ++ // pub fn LsaOpenPolicy(); ++ // pub fn LsaOpenPolicySce(); ++ // pub fn LsaOpenSecret(); ++ // pub fn LsaOpenTrustedDomain(); ++ // pub fn LsaOpenTrustedDomainByName(); ++ // pub fn LsaQueryCAPs(); ++ // pub fn LsaQueryDomainInformationPolicy(); ++ // pub fn LsaQueryForestTrustInformation(); ++ // pub fn LsaQueryInfoTrustedDomain(); ++ // pub fn LsaQueryInformationPolicy(); ++ // pub fn LsaQuerySecret(); ++ // pub fn 
LsaQuerySecurityObject(); ++ // pub fn LsaQueryTrustedDomainInfo(); ++ // pub fn LsaQueryTrustedDomainInfoByName(); ++ // pub fn LsaRemoveAccountRights(); ++ // pub fn LsaRemovePrivilegesFromAccount(); ++ // pub fn LsaRetrievePrivateData(); ++ // pub fn LsaSetCAPs(); ++ // pub fn LsaSetDomainInformationPolicy(); ++ // pub fn LsaSetForestTrustInformation(); ++ // pub fn LsaSetInformationPolicy(); ++ // pub fn LsaSetInformationTrustedDomain(); ++ // pub fn LsaSetQuotasForAccount(); ++ // pub fn LsaSetSecret(); ++ // pub fn LsaSetSecurityObject(); ++ // pub fn LsaSetSystemAccessAccount(); ++ // pub fn LsaSetTrustedDomainInfoByName(); ++ // pub fn LsaSetTrustedDomainInformation(); ++ // pub fn LsaStorePrivateData(); ++ // pub fn MIDL_user_free_Ext(); ++ // pub fn MSChapSrvChangePassword(); ++ // pub fn MSChapSrvChangePassword2(); ++ // pub fn MakeAbsoluteSD(); ++ // pub fn MakeAbsoluteSD2(); ++ // pub fn MakeSelfRelativeSD(); ++ // pub fn MapGenericMask(); ++ // pub fn NotifyBootConfigStatus(); ++ // pub fn NotifyChangeEventLog(); ++ // pub fn NotifyServiceStatusChange(); ++ // pub fn NotifyServiceStatusChangeA(); ++ // pub fn NotifyServiceStatusChangeW(); ++ // pub fn ObjectCloseAuditAlarmA(); ++ // pub fn ObjectCloseAuditAlarmW(); ++ // pub fn ObjectDeleteAuditAlarmA(); ++ // pub fn ObjectDeleteAuditAlarmW(); ++ // pub fn ObjectOpenAuditAlarmA(); ++ // pub fn ObjectOpenAuditAlarmW(); ++ // pub fn ObjectPrivilegeAuditAlarmA(); ++ // pub fn ObjectPrivilegeAuditAlarmW(); ++ // pub fn OpenBackupEventLogA(); ++ // pub fn OpenBackupEventLogW(); ++ // pub fn OpenEncryptedFileRawA(); ++ // pub fn OpenEncryptedFileRawW(); ++ // pub fn OpenEventLogA(); ++ // pub fn OpenEventLogW(); ++ pub fn OpenProcessToken( ++ ProcessHandle: HANDLE, DesiredAccess: DWORD, TokenHandle: PHANDLE, ++ ) -> BOOL; ++ pub fn OpenSCManagerA( ++ lpMachineName: LPCSTR, lpDatabaseName: LPCSTR, dwDesiredAccess: DWORD, ++ ) -> SC_HANDLE; ++ pub fn OpenSCManagerW( ++ lpMachineName: LPCWSTR, lpDatabaseName: 
LPCWSTR, dwDesiredAccess: DWORD, ++ ) -> SC_HANDLE; ++ pub fn OpenServiceA( ++ hSCManager: SC_HANDLE, lpServiceName: LPCSTR, dwDesiredAccess: DWORD, ++ ) -> SC_HANDLE; ++ pub fn OpenServiceW( ++ hSCManager: SC_HANDLE, lpServiceName: LPCWSTR, dwDesiredAccess: DWORD, ++ ) -> SC_HANDLE; ++ // pub fn OpenThreadToken(); ++ // pub fn OpenThreadWaitChainSession(); ++ // pub fn OpenTraceA(); ++ // pub fn OpenTraceW(); ++ // pub fn OperationEnd(); ++ // pub fn OperationStart(); ++ // pub fn PerfAddCounters(); ++ // pub fn PerfCloseQueryHandle(); ++ // pub fn PerfCreateInstance(); ++ // pub fn PerfDecrementULongCounterValue(); ++ // pub fn PerfDecrementULongLongCounterValue(); ++ // pub fn PerfDeleteCounters(); ++ // pub fn PerfDeleteInstance(); ++ // pub fn PerfEnumerateCounterSet(); ++ // pub fn PerfEnumerateCounterSetInstances(); ++ // pub fn PerfIncrementULongCounterValue(); ++ // pub fn PerfIncrementULongLongCounterValue(); ++ // pub fn PerfOpenQueryHandle(); ++ // pub fn PerfQueryCounterData(); ++ // pub fn PerfQueryCounterInfo(); ++ // pub fn PerfQueryCounterSetRegistrationInfo(); ++ // pub fn PerfQueryInstance(); ++ // pub fn PerfRegCloseKey(); ++ // pub fn PerfRegEnumKey(); ++ // pub fn PerfRegEnumValue(); ++ // pub fn PerfRegQueryInfoKey(); ++ // pub fn PerfRegQueryValue(); ++ // pub fn PerfRegSetValue(); ++ // pub fn PerfSetCounterRefValue(); ++ // pub fn PerfSetCounterSetInfo(); ++ // pub fn PerfSetULongCounterValue(); ++ // pub fn PerfSetULongLongCounterValue(); ++ // pub fn PerfStartProvider(); ++ // pub fn PerfStartProviderEx(); ++ // pub fn PerfStopProvider(); ++ // pub fn PrivilegeCheck(); ++ // pub fn PrivilegedServiceAuditAlarmA(); ++ // pub fn PrivilegedServiceAuditAlarmW(); ++ // pub fn ProcessTrace(); ++ // pub fn QueryAllTracesA(); ++ // pub fn QueryAllTracesW(); ++ // pub fn QueryRecoveryAgentsOnEncryptedFile(); ++ // pub fn QuerySecurityAccessMask(); ++ // pub fn QueryServiceConfig2A(); ++ // pub fn QueryServiceConfig2W(); ++ // pub fn 
QueryServiceConfigA(); ++ // pub fn QueryServiceConfigW(); ++ // pub fn QueryServiceDynamicInformation(); ++ // pub fn QueryServiceLockStatusA(); ++ // pub fn QueryServiceLockStatusW(); ++ // pub fn QueryServiceObjectSecurity(); ++ pub fn QueryServiceStatus(hService: SC_HANDLE, lpServiceStatus: LPSERVICE_STATUS) -> BOOL; ++ pub fn QueryServiceStatusEx( ++ hService: SC_HANDLE, InfoLevel: SC_STATUS_TYPE, lpBuffer: LPBYTE, cbBufSize: DWORD, ++ pcbBytesNeeded: LPDWORD, ++ ) -> BOOL; ++ // pub fn QueryTraceA(); ++ // pub fn QueryTraceW(); ++ // pub fn QueryUsersOnEncryptedFile(); ++ // pub fn ReadEncryptedFileRaw(); ++ // pub fn ReadEventLogA(); ++ // pub fn ReadEventLogW(); ++ pub fn RegCloseKey(hKey: HKEY) -> LONG; ++ pub fn RegConnectRegistryA(lpMachineName: LPCSTR, hKey: HKEY, phkResult: PHKEY) -> LONG; ++ // pub fn RegConnectRegistryExA(); ++ // pub fn RegConnectRegistryExW(); ++ pub fn RegConnectRegistryW(lpMachineName: LPCWSTR, hKey: HKEY, phkResult: PHKEY) -> LONG; ++ pub fn RegCopyTreeA(hKeySrc: HKEY, lpSubKey: LPCSTR, hKeyDest: HKEY) -> LONG; ++ pub fn RegCopyTreeW(hKeySrc: HKEY, lpSubKey: LPCWSTR, hKeyDest: HKEY) -> LONG; ++ // pub fn RegCreateKeyA(); ++ pub fn RegCreateKeyExA( ++ hKey: HKEY, lpSubKey: LPCSTR, Reserved: DWORD, lpClass: LPSTR, dwOptions: DWORD, ++ samDesired: REGSAM, lpSecurityAttributes: LPSECURITY_ATTRIBUTES, phkResult: PHKEY, ++ lpdwDisposition: LPDWORD, ++ ) -> LONG; ++ pub fn RegCreateKeyExW( ++ hKey: HKEY, lpSubKey: LPCWSTR, Reserved: DWORD, lpClass: LPWSTR, dwOptions: DWORD, ++ samDesired: REGSAM, lpSecurityAttributes: LPSECURITY_ATTRIBUTES, phkResult: PHKEY, ++ lpdwDisposition: LPDWORD, ++ ) -> LONG; ++ pub fn RegCreateKeyTransactedA( ++ hKey: HKEY, lpSubKey: LPCSTR, Reserved: DWORD, lpClass: LPSTR, dwOptions: DWORD, ++ samDesired: REGSAM, lpSecurityAttributes: LPSECURITY_ATTRIBUTES, phkResult: PHKEY, ++ lpdwDisposition: LPDWORD, hTransaction: HANDLE, pExtendedParemeter: PVOID, ++ ) -> LONG; ++ pub fn RegCreateKeyTransactedW( ++ hKey: 
HKEY, lpSubKey: LPCWSTR, Reserved: DWORD, lpClass: LPWSTR, dwOptions: DWORD, ++ samDesired: REGSAM, lpSecurityAttributes: LPSECURITY_ATTRIBUTES, phkResult: PHKEY, ++ lpdwDisposition: LPDWORD, hTransaction: HANDLE, pExtendedParemeter: PVOID, ++ ) -> LONG; ++ // pub fn RegCreateKeyW(); ++ pub fn RegDeleteKeyA(hKey: HKEY, lpSubKey: LPCSTR) -> LONG; ++ pub fn RegDeleteKeyExA( ++ hKey: HKEY, lpSubKey: LPCSTR, samDesired: REGSAM, Reserved: DWORD, ++ ) -> LONG; ++ pub fn RegDeleteKeyExW( ++ hKey: HKEY, lpSubKey: LPCWSTR, samDesired: REGSAM, Reserved: DWORD, ++ ) -> LONG; ++ pub fn RegDeleteKeyTransactedA( ++ hKey: HKEY, lpSubKey: LPCSTR, samDesired: REGSAM, Reserved: DWORD, ++ hTransaction: HANDLE, pExtendedParemeter: PVOID, ++ ) -> LONG; ++ pub fn RegDeleteKeyTransactedW( ++ hKey: HKEY, lpSubKey: LPCWSTR, samDesired: REGSAM, Reserved: DWORD, ++ hTransaction: HANDLE, pExtendedParemeter: PVOID, ++ ) -> LONG; ++ pub fn RegDeleteKeyValueA(hKey: HKEY, lpSubKey: LPCSTR, lpValueName: LPCSTR) -> LONG; ++ pub fn RegDeleteKeyValueW(hKey: HKEY, lpSubKey: LPCWSTR, lpValueName: LPCWSTR) -> LONG; ++ pub fn RegDeleteKeyW(hKey: HKEY, lpSubKey: LPCWSTR) -> LONG; ++ pub fn RegDeleteTreeA(hKey: HKEY, lpSubKey: LPCSTR) -> LONG; ++ pub fn RegDeleteTreeW(hKey: HKEY, lpSubKey: LPCWSTR) -> LONG; ++ pub fn RegDeleteValueA(hKey: HKEY, lpValueName: LPCSTR) -> LONG; ++ pub fn RegDeleteValueW(hKey: HKEY, lpValueName: LPCWSTR) -> LONG; ++ pub fn RegDisablePredefinedCache() -> LONG; ++ pub fn RegDisablePredefinedCacheEx() -> LONG; ++ pub fn RegDisableReflectionKey(hBase: HKEY) -> LONG; ++ pub fn RegEnableReflectionKey(hBase: HKEY) -> LONG; ++ // pub fn RegEnumKeyA(); ++ pub fn RegEnumKeyExA( ++ hKey: HKEY, dwIndex: DWORD, lpName: LPSTR, lpcName: LPDWORD, lpReserved: LPDWORD, ++ lpClass: LPSTR, lpcClass: LPDWORD, lpftLastWriteTime: PFILETIME, ++ ) -> LONG; ++ pub fn RegEnumKeyExW( ++ hKey: HKEY, dwIndex: DWORD, lpName: LPWSTR, lpcName: LPDWORD, lpReserved: LPDWORD, ++ lpClass: LPWSTR, lpcClass: 
LPDWORD, lpftLastWriteTime: PFILETIME, ++ ) -> LONG; ++ // pub fn RegEnumKeyW(); ++ pub fn RegEnumValueA( ++ hKey: HKEY, dwIndex: DWORD, lpValueName: LPSTR, lpcchValueName: LPDWORD, ++ lpReserved: LPDWORD, lpType: LPDWORD, lpData: LPBYTE, lpcbData: LPDWORD, ++ ) -> LONG; ++ pub fn RegEnumValueW( ++ hKey: HKEY, dwIndex: DWORD, lpValueName: LPWSTR, lpcchValueName: LPDWORD, ++ lpReserved: LPDWORD, lpType: LPDWORD, lpData: LPBYTE, lpcbData: LPDWORD, ++ ) -> LONG; ++ pub fn RegFlushKey(hKey: HKEY) -> LONG; ++ // pub fn RegGetKeySecurity(); ++ pub fn RegGetValueA( ++ hkey: HKEY, lpSubKey: LPCSTR, lpValue: LPCSTR, dwFlags: DWORD, pdwType: LPDWORD, ++ pvData: PVOID, pcbData: LPDWORD, ++ ) -> LONG; ++ pub fn RegGetValueW( ++ hkey: HKEY, lpSubKey: LPCWSTR, lpValue: LPCWSTR, dwFlags: DWORD, pdwType: LPDWORD, ++ pvData: PVOID, pcbData: LPDWORD, ++ ) -> LONG; ++ // pub fn RegLoadAppKeyA(); ++ // pub fn RegLoadAppKeyW(); ++ // pub fn RegLoadKeyA(); ++ // pub fn RegLoadKeyW(); ++ // pub fn RegLoadMUIStringA(); ++ pub fn RegLoadMUIStringW( ++ hKey: HKEY, pszValue: LPCWSTR, pszOutBuf: LPWSTR, cbOutBuf: DWORD, pcbData: LPDWORD, ++ Flags: DWORD, pszDirectory: LPCWSTR, ++ ) -> LONG; ++ pub fn RegNotifyChangeKeyValue( ++ hKey: HKEY, bWatchSubtree: BOOL, dwNotifyFilter: DWORD, hEvent: HANDLE, ++ fAsynchronous: BOOL, ++ ) -> LONG; ++ pub fn RegOpenCurrentUser(samDesired: REGSAM, phkResult: PHKEY) -> LONG; ++ // pub fn RegOpenKeyA(); ++ pub fn RegOpenKeyExA( ++ hKey: HKEY, lpSubKey: LPCSTR, ulOptions: DWORD, samDesired: REGSAM, phkResult: PHKEY, ++ ) -> LONG; ++ pub fn RegOpenKeyExW( ++ hKey: HKEY, lpSubKey: LPCWSTR, ulOptions: DWORD, samDesired: REGSAM, phkResult: PHKEY, ++ ) -> LONG; ++ pub fn RegOpenKeyTransactedA( ++ hKey: HKEY, lpSubKey: LPCSTR, ulOptions: DWORD, samDesired: REGSAM, phkResult: PHKEY, ++ hTransaction: HANDLE, pExtendedParemeter: PVOID, ++ ) -> LONG; ++ pub fn RegOpenKeyTransactedW( ++ hKey: HKEY, lpSubKey: LPCWSTR, ulOptions: DWORD, samDesired: REGSAM, phkResult: 
PHKEY, ++ hTransaction: HANDLE, pExtendedParemeter: PVOID, ++ ) -> LONG; ++ // pub fn RegOpenKeyW(); ++ pub fn RegOpenUserClassesRoot( ++ hToken: HANDLE, dwOptions: DWORD, samDesired: REGSAM, phkResult: PHKEY, ++ ) -> LONG; ++ pub fn RegOverridePredefKey(hKey: HKEY, hNewHKey: HKEY) -> LONG; ++ pub fn RegQueryInfoKeyA( ++ hKey: HKEY, lpClass: LPSTR, lpcClass: LPDWORD, lpReserved: LPDWORD, lpcSubKeys: LPDWORD, ++ lpcMaxSubKeyLen: LPDWORD, lpcMaxClassLen: LPDWORD, lpcValues: LPDWORD, ++ lpcMaxValueNameLen: LPDWORD, lpcMaxValueLen: LPDWORD, lpcbSecurityDescriptor: LPDWORD, ++ lpftLastWriteTime: PFILETIME, ++ ) -> LONG; ++ pub fn RegQueryInfoKeyW( ++ hKey: HKEY, lpClass: LPWSTR, lpcClass: LPDWORD, lpReserved: LPDWORD, lpcSubKeys: LPDWORD, ++ lpcMaxSubKeyLen: LPDWORD, lpcMaxClassLen: LPDWORD, lpcValues: LPDWORD, ++ lpcMaxValueNameLen: LPDWORD, lpcMaxValueLen: LPDWORD, lpcbSecurityDescriptor: LPDWORD, ++ lpftLastWriteTime: PFILETIME, ++ ) -> LONG; ++ pub fn RegQueryMultipleValuesA( ++ hKey: HKEY, val_list: PVALENTA, num_vals: DWORD, lpValueBuf: LPSTR, ldwTotsize: LPDWORD, ++ ) -> LONG; ++ pub fn RegQueryMultipleValuesW( ++ hKey: HKEY, val_list: PVALENTW, num_vals: DWORD, lpValueBuf: LPWSTR, ldwTotsize: LPDWORD, ++ ) -> LONG; ++ pub fn RegQueryReflectionKey(hBase: HKEY, bIsReflectionDisabled: PBOOL) -> LONG; ++ pub fn RegQueryValueExA( ++ hKey: HKEY, lpValueName: LPCSTR, lpReserved: LPDWORD, lpType: LPDWORD, lpData: LPBYTE, ++ lpcbData: LPDWORD, ++ ) -> LONG; ++ pub fn RegQueryValueExW( ++ hKey: HKEY, lpValueName: LPCWSTR, lpReserved: LPDWORD, lpType: LPDWORD, lpData: LPBYTE, ++ lpcbData: LPDWORD, ++ ) -> LONG; ++ // pub fn RegQueryValueW(); ++ // pub fn RegRenameKey(); ++ // pub fn RegReplaceKeyA(); ++ // pub fn RegReplaceKeyW(); ++ // pub fn RegRestoreKeyA(); ++ // pub fn RegRestoreKeyW(); ++ // pub fn RegSaveKeyA(); ++ // pub fn RegSaveKeyExA(); ++ // pub fn RegSaveKeyExW(); ++ // pub fn RegSaveKeyW(); ++ // pub fn RegSetKeySecurity(); ++ pub fn RegSetKeyValueA( ++ 
hKey: HKEY, lpSubKey: LPCSTR, lpValueName: LPCSTR, dwType: DWORD, lpData: LPCVOID, ++ cbData: DWORD, ++ ) -> LONG; ++ pub fn RegSetValueExA( ++ hKey: HKEY, lpValueName: LPCSTR, Reserved: DWORD, dwType: DWORD, lpData: *const BYTE, ++ cbData: DWORD, ++ ) -> LONG; ++ pub fn RegSetValueExW( ++ hKey: HKEY, lpValueName: LPCWSTR, Reserved: DWORD, dwType: DWORD, lpData: *const BYTE, ++ cbData: DWORD, ++ ) -> LONG; ++ pub fn RegSetKeyValueW( ++ hKey: HKEY, lpSubKey: LPCWSTR, lpValueName: LPCWSTR, dwType: DWORD, lpData: LPCVOID, ++ cbData: DWORD, ++ ) -> LONG; ++ // pub fn RegUnLoadKeyA(); ++ // pub fn RegUnLoadKeyW(); ++ // pub fn RegisterEventSourceA(); ++ // pub fn RegisterEventSourceW(); ++ pub fn RegisterServiceCtrlHandlerA( ++ lpServiceName: LPCSTR, lpHandlerProc: LPHANDLER_FUNCTION, ++ ) -> SERVICE_STATUS_HANDLE; ++ pub fn RegisterServiceCtrlHandlerExA( ++ lpServiceName: LPCSTR, lpHandlerProc: LPHANDLER_FUNCTION_EX, lpContext: LPVOID, ++ ) -> SERVICE_STATUS_HANDLE; ++ pub fn RegisterServiceCtrlHandlerExW( ++ lpServiceName: LPCWSTR, lpHandlerProc: LPHANDLER_FUNCTION_EX, lpContext: LPVOID, ++ ) -> SERVICE_STATUS_HANDLE; ++ pub fn RegisterServiceCtrlHandlerW( ++ lpServiceName: LPCWSTR, lpHandlerProc: LPHANDLER_FUNCTION, ++ ) -> SERVICE_STATUS_HANDLE; ++ // pub fn RegisterTraceGuidsA(); ++ // pub fn RegisterTraceGuidsW(); ++ // pub fn RegisterWaitChainCOMCallback(); ++ // pub fn RemoteRegEnumKeyWrapper(); ++ // pub fn RemoteRegEnumValueWrapper(); ++ // pub fn RemoteRegQueryInfoKeyWrapper(); ++ // pub fn RemoteRegQueryValueWrapper(); ++ // pub fn RemoveTraceCallback(); ++ // pub fn RemoveUsersFromEncryptedFile(); ++ // pub fn ReportEventA(); ++ // pub fn ReportEventW(); ++ // pub fn RevertToSelf(); ++ // pub fn SafeBaseRegGetKeySecurity(); ++ // pub fn SaferCloseLevel(); ++ // pub fn SaferComputeTokenFromLevel(); ++ // pub fn SaferCreateLevel(); ++ // pub fn SaferGetLevelInformation(); ++ // pub fn SaferGetPolicyInformation(); ++ // pub fn SaferIdentifyLevel(); ++ // pub 
fn SaferRecordEventLogEntry(); ++ // pub fn SaferSetLevelInformation(); ++ // pub fn SaferSetPolicyInformation(); ++ // pub fn SaferiIsExecutableFileType(); ++ // pub fn SetAclInformation(); ++ // pub fn SetEncryptedFileMetadata(); ++ // pub fn SetEntriesInAccessListA(); ++ // pub fn SetEntriesInAccessListW(); ++ // pub fn SetEntriesInAclA(); ++ // pub fn SetEntriesInAclW(); ++ // pub fn SetEntriesInAuditListA(); ++ // pub fn SetEntriesInAuditListW(); ++ // pub fn SetFileSecurityA(); ++ // pub fn SetFileSecurityW(); ++ // pub fn SetInformationCodeAuthzLevelW(); ++ // pub fn SetInformationCodeAuthzPolicyW(); ++ // pub fn SetKernelObjectSecurity(); ++ // pub fn SetNamedSecurityInfoA(); ++ // pub fn SetNamedSecurityInfoExA(); ++ // pub fn SetNamedSecurityInfoExW(); ++ // pub fn SetNamedSecurityInfoW(); ++ // pub fn SetPrivateObjectSecurity(); ++ // pub fn SetPrivateObjectSecurityEx(); ++ // pub fn SetSecurityAccessMask(); ++ // pub fn SetSecurityDescriptorControl(); ++ // pub fn SetSecurityDescriptorDacl(); ++ // pub fn SetSecurityDescriptorGroup(); ++ // pub fn SetSecurityDescriptorOwner(); ++ // pub fn SetSecurityDescriptorRMControl(); ++ // pub fn SetSecurityDescriptorSacl(); ++ // pub fn SetSecurityInfo(); ++ // pub fn SetSecurityInfoExA(); ++ // pub fn SetSecurityInfoExW(); ++ // pub fn SetServiceBits(); ++ // pub fn SetServiceObjectSecurity(); ++ pub fn SetServiceStatus( ++ hServiceStatus: SERVICE_STATUS_HANDLE, lpServiceStatus: LPSERVICE_STATUS, ++ ) -> BOOL; ++ // pub fn SetThreadToken(); ++ // pub fn SetTokenInformation(); ++ // pub fn SetTraceCallback(); ++ // pub fn SetUserFileEncryptionKey(); ++ // pub fn SetUserFileEncryptionKeyEx(); ++ // pub fn StartServiceA(); ++ pub fn StartServiceCtrlDispatcherA(lpServiceStartTable: *const SERVICE_TABLE_ENTRYA) -> BOOL; ++ pub fn StartServiceCtrlDispatcherW(lpServiceStartTable: *const SERVICE_TABLE_ENTRYW) -> BOOL; ++ // pub fn StartServiceW(); ++ // pub fn StartTraceA(); ++ // pub fn StartTraceW(); ++ // pub fn 
StopTraceA(); ++ // pub fn StopTraceW(); ++ // pub fn SystemFunction001(); ++ // pub fn SystemFunction002(); ++ // pub fn SystemFunction003(); ++ // pub fn SystemFunction004(); ++ // pub fn SystemFunction005(); ++ // pub fn SystemFunction006(); ++ // pub fn SystemFunction007(); ++ // pub fn SystemFunction008(); ++ // pub fn SystemFunction009(); ++ // pub fn SystemFunction010(); ++ // pub fn SystemFunction011(); ++ // pub fn SystemFunction012(); ++ // pub fn SystemFunction013(); ++ // pub fn SystemFunction014(); ++ // pub fn SystemFunction015(); ++ // pub fn SystemFunction016(); ++ // pub fn SystemFunction017(); ++ // pub fn SystemFunction018(); ++ // pub fn SystemFunction019(); ++ // pub fn SystemFunction020(); ++ // pub fn SystemFunction021(); ++ // pub fn SystemFunction022(); ++ // pub fn SystemFunction023(); ++ // pub fn SystemFunction024(); ++ // pub fn SystemFunction025(); ++ // pub fn SystemFunction026(); ++ // pub fn SystemFunction027(); ++ // pub fn SystemFunction028(); ++ // pub fn SystemFunction029(); ++ // pub fn SystemFunction030(); ++ // pub fn SystemFunction031(); ++ // pub fn SystemFunction032(); ++ // pub fn SystemFunction033(); ++ // pub fn SystemFunction034(); ++ // pub fn SystemFunction036(); ++ // pub fn SystemFunction040(); ++ // pub fn SystemFunction041(); ++ // pub fn TraceEvent(); ++ // pub fn TraceEventInstance(); ++ // pub fn TraceMessage(); ++ // pub fn TraceMessageVa(); ++ // pub fn TraceQueryInformation(); ++ // pub fn TraceSetInformation(); ++ // pub fn TreeResetNamedSecurityInfoA(); ++ // pub fn TreeResetNamedSecurityInfoW(); ++ // pub fn TreeSetNamedSecurityInfoA(); ++ // pub fn TreeSetNamedSecurityInfoW(); ++ // pub fn TrusteeAccessToObjectA(); ++ // pub fn TrusteeAccessToObjectW(); ++ // pub fn UninstallApplication(); ++ // pub fn UnlockServiceDatabase(); ++ // pub fn UnregisterTraceGuids(); ++ // pub fn UpdateTraceA(); ++ // pub fn UpdateTraceW(); ++ // pub fn UsePinForEncryptedFilesA(); ++ // pub fn UsePinForEncryptedFilesW(); ++ 
// pub fn WaitServiceState(); ++ // pub fn WriteEncryptedFileRaw(); ++} diff --cc vendor/backtrace-sys-0.1.12/.cargo-checksum.json index 000000000,000000000..625e4951d new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"afccc5772ba333abccdf60d55200fa3406f8c59dcf54d5f7998c9107d3799c7c"} diff --cc vendor/backtrace-sys-0.1.12/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/.cargo-ok diff --cc vendor/backtrace-sys-0.1.12/Cargo.toml index 000000000,000000000..95bfc39bd new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/Cargo.toml @@@ -1,0 -1,0 +1,26 @@@ ++# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO ++# ++# When uploading crates to the registry Cargo will automatically ++# "normalize" Cargo.toml files for maximal compatibility ++# with all versions of Cargo and also rewrite `path` dependencies ++# to registry (e.g. crates.io) dependencies ++# ++# If you believe there's an error in this file please file an ++# issue against the rust-lang/cargo repository. 
If you're ++# editing this file be aware that the upstream Cargo.toml ++# will likely look very different (and much more reasonable) ++ ++[package] ++name = "backtrace-sys" ++version = "0.1.12" ++authors = ["Alex Crichton "] ++build = "build.rs" ++description = "Bindings to the libbacktrace gcc library\n" ++homepage = "https://github.com/alexcrichton/backtrace-rs" ++documentation = "http://alexcrichton.com/backtrace-rs" ++license = "MIT/Apache-2.0" ++repository = "https://github.com/alexcrichton/backtrace-rs" ++[dependencies.libc] ++version = "0.2" ++[build-dependencies.gcc] ++version = "0.3" diff --cc vendor/backtrace-sys-0.1.12/build.rs index 000000000,000000000..95f89e1b9 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/build.rs @@@ -1,0 -1,0 +1,171 @@@ ++extern crate gcc; ++ ++use std::env; ++use std::ffi::OsString; ++use std::fs; ++use std::io; ++use std::path::PathBuf; ++use std::process::Command; ++ ++macro_rules! t { ++ ($e:expr) => (match $e { ++ Ok(e) => e, ++ Err(e) => panic!("{} failed with {}", stringify!($e), e), ++ }) ++} ++ ++fn try_tool(compiler: &gcc::Tool, cc: &str, compiler_suffix: &str, tool_suffix: &str) ++ -> Option { ++ if !cc.ends_with(compiler_suffix) { ++ return None ++ } ++ let cc = cc.replace(compiler_suffix, tool_suffix); ++ let candidate = compiler.path().parent().unwrap().join(cc); ++ if Command::new(&candidate).output().is_ok() { ++ Some(candidate) ++ } else { ++ None ++ } ++} ++ ++fn find_tool(compiler: &gcc::Tool, cc: &str, tool: &str) -> PathBuf { ++ // Allow overrides via env var ++ if let Some(s) = env::var_os(tool.to_uppercase()) { ++ return s.into() ++ } ++ let tool_suffix = format!("-{}", tool); ++ try_tool(compiler, cc, "-gcc", &tool_suffix) ++ .or_else(|| try_tool(compiler, cc, "-clang", &tool_suffix)) ++ .or_else(|| try_tool(compiler, cc, "-cc", &tool_suffix)) ++ .unwrap_or_else(|| PathBuf::from(tool)) ++} ++ ++fn main() { ++ let src = env::current_dir().unwrap(); ++ let dst = 
PathBuf::from(env::var_os("OUT_DIR").unwrap()); ++ let target = env::var("TARGET").unwrap(); ++ let host = env::var("HOST").unwrap(); ++ ++ // libbacktrace doesn't currently support Mach-O files ++ if target.contains("darwin") { ++ return ++ } ++ ++ // libbacktrace isn't used on windows ++ if target.contains("windows") { ++ return ++ } ++ ++ // no way this will ever compile for emscripten ++ if target.contains("emscripten") { ++ return ++ } ++ ++ let mut make = "make"; ++ ++ // host BSDs has GNU-make as gmake ++ if host.contains("bitrig") || host.contains("dragonfly") || ++ host.contains("freebsd") || host.contains("netbsd") || ++ host.contains("openbsd") { ++ ++ make = "gmake" ++ } ++ ++ let configure = src.join("src/libbacktrace/configure").into_os_string(); ++ ++ // When cross-compiling on Windows, this path will contain backslashes, ++ // but configure doesn't like that. Replace them with forward slashes. ++ #[cfg(windows)] ++ let configure = { ++ use std::os::windows::ffi::{OsStrExt, OsStringExt}; ++ let mut chars: Vec = configure.encode_wide().collect(); ++ for c in chars.iter_mut() { ++ if *c == '\\' as u16 { ++ *c = '/' as u16; ++ } ++ } ++ OsString::from_wide(&chars) ++ }; ++ ++ let cfg = gcc::Config::new(); ++ let compiler = cfg.get_compiler(); ++ let cc = compiler.path().file_name().unwrap().to_str().unwrap(); ++ let mut flags = OsString::new(); ++ for (i, flag) in compiler.args().iter().enumerate() { ++ if i > 0 { ++ flags.push(" "); ++ } ++ flags.push(flag); ++ } ++ let ar = find_tool(&compiler, cc, "ar"); ++ let mut cmd = Command::new("sh"); ++ ++ cmd.arg(configure) ++ .current_dir(&dst) ++ .env("AR", &ar) ++ .env("CC", compiler.path()) ++ .env("CFLAGS", flags) ++ .arg("--with-pic") ++ .arg("--disable-multilib") ++ .arg("--disable-shared") ++ .arg("--disable-host-shared") ++ .arg(format!("--host={}", target)); ++ ++ // Apparently passing this flag causes problems on Windows ++ if !host.contains("windows") { ++ cmd.arg(format!("--build={}", host)); ++ 
} ++ ++ run(&mut cmd, "sh"); ++ run(Command::new(make) ++ .current_dir(&dst) ++ .arg(format!("INCDIR={}", ++ src.join("src/libbacktrace").display())), ++ "make"); ++ println!("cargo:rustc-link-search=native={}/.libs", dst.display()); ++ println!("cargo:rustc-link-lib=static=backtrace"); ++ ++ // The standard library currently bundles in libbacktrace, but it's ++ // compiled with hidden visibility (naturally) so we can't use it. ++ // ++ // To prevent conflicts with a second statically linked copy we rename all ++ // symbols with a '__rbt_' prefix manually here through `objcopy`. ++ let lib = dst.join(".libs/libbacktrace.a"); ++ let tmpdir = dst.join("__tmp"); ++ drop(fs::remove_dir_all(&tmpdir)); ++ t!(fs::create_dir_all(&tmpdir)); ++ run(Command::new(&ar).arg("x").arg(&lib).current_dir(&tmpdir), ++ ar.to_str().unwrap()); ++ ++ t!(fs::remove_file(&lib)); ++ let mut objs = Vec::new(); ++ let objcopy = find_tool(&compiler, cc, "objcopy"); ++ for obj in t!(tmpdir.read_dir()) { ++ let obj = t!(obj); ++ run(Command::new(&objcopy) ++ .arg("--redefine-syms=symbol-map") ++ .arg(obj.path()), ++ objcopy.to_str().unwrap()); ++ objs.push(obj.path()); ++ } ++ ++ run(Command::new(&ar).arg("crus").arg(&lib).args(&objs), ++ ar.to_str().unwrap()); ++} ++ ++fn run(cmd: &mut Command, program: &str) { ++ println!("running: {:?}", cmd); ++ let status = match cmd.status() { ++ Ok(s) => s, ++ Err(ref e) if e.kind() == io::ErrorKind::NotFound => { ++ panic!("\n\nfailed to execute command: {}\nIs `{}` \ ++ not installed?\n\n", ++ e, ++ program); ++ } ++ Err(e) => panic!("failed to get status: {}", e), ++ }; ++ if !status.success() { ++ panic!("failed with: {}", status); ++ } ++} diff --cc vendor/backtrace-sys-0.1.12/src/lib.rs index 000000000,000000000..0edc2674c new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/lib.rs @@@ -1,0 -1,0 +1,44 @@@ ++#![allow(bad_style)] ++ ++extern crate libc; ++ ++use libc::uintptr_t; ++use std::os::raw::{c_void, c_char, c_int}; ++ ++pub 
type backtrace_syminfo_callback = ++ extern fn(data: *mut c_void, ++ pc: uintptr_t, ++ symname: *const c_char, ++ symval: uintptr_t, ++ symsize: uintptr_t); ++pub type backtrace_full_callback = ++ extern fn(data: *mut c_void, ++ pc: uintptr_t, ++ filename: *const c_char, ++ lineno: c_int, ++ function: *const c_char) -> c_int; ++pub type backtrace_error_callback = ++ extern fn(data: *mut c_void, ++ msg: *const c_char, ++ errnum: c_int); ++pub enum backtrace_state {} ++ ++extern { ++ #[link_name = "__rbt_backtrace_create_state"] ++ pub fn backtrace_create_state(filename: *const c_char, ++ threaded: c_int, ++ error: backtrace_error_callback, ++ data: *mut c_void) -> *mut backtrace_state; ++ #[link_name = "__rbt_backtrace_syminfo"] ++ pub fn backtrace_syminfo(state: *mut backtrace_state, ++ addr: uintptr_t, ++ cb: backtrace_syminfo_callback, ++ error: backtrace_error_callback, ++ data: *mut c_void) -> c_int; ++ #[link_name = "__rbt_backtrace_pcinfo"] ++ pub fn backtrace_pcinfo(state: *mut backtrace_state, ++ addr: uintptr_t, ++ cb: backtrace_full_callback, ++ error: backtrace_error_callback, ++ data: *mut c_void) -> c_int; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/ChangeLog index 000000000,000000000..88005dfb0 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/ChangeLog @@@ -1,0 -1,0 +1,598 @@@ ++2016-11-15 Matthias Klose ++ ++ * configure: Regenerate. ++ ++2016-09-11 Carlos Liam ++ ++ * all: Remove meaningless trailing whitespace. ++ ++2016-05-18 Uros Bizjak ++ ++ PR target/71161 ++ * elf.c (phdr_callback) [__i386__]: Add ++ __attribute__((__force_align_arg_pointer__)). ++ ++2016-03-02 Maxim Ostapenko ++ ++ * elf.c (backtrace_initialize): Properly initialize elf_fileline_fn to ++ avoid possible crash. ++ (elf_add): Don't set *fileline_fn to elf_nodebug value in case of ++ missing debug info anymore. ++ ++2016-02-06 John David Anglin ++ ++ * mmap.c (MAP_FAILED): Define if not defined. 
++ ++2016-01-04 Jakub Jelinek ++ ++ Update copyright years. ++ ++2015-12-18 Andris Pavenis ++ ++ * configure.ac: Specify that DJGPP do not have mmap ++ even when sys/mman.h exists. ++ * configure: Regenerate ++ ++2015-12-09 John David Anglin ++ ++ PR libgfortran/68115 ++ * configure.ac: Set libbacktrace_cv_sys_sync to no on hppa*-*-hpux*. ++ * configure: Regenerate. ++ * elf.c (backtrace_initialize): Cast __sync_bool_compare_and_swap call ++ to void. ++ ++2015-09-17 Ian Lance Taylor ++ ++ * posix.c (backtrace_open): Cast second argument of open() to int. ++ ++2015-09-11 Ian Lance Taylor ++ ++ * Makefile.am (backtrace.lo): Depend on internal.h. ++ (sort.lo, stest.lo): Add explicit dependencies. ++ * Makefile.in: Rebuild. ++ ++2015-09-09 Hans-Peter Nilsson ++ ++ * backtrace.c: #include . ++ ++2015-09-08 Ian Lance Taylor ++ ++ PR other/67457 ++ * backtrace.c: #include "internal.h". ++ (struct backtrace_data): Add can_alloc field. ++ (unwind): If can_alloc is false, don't try to get file/line ++ information. ++ (backtrace_full): Set can_alloc field in bdata. ++ * alloc.c (backtrace_alloc): Don't call error_callback if it is ++ NULL. ++ * mmap.c (backtrace_alloc): Likewise. ++ * internal.h: Update comments for backtrace_alloc and ++ backtrace_free. ++ ++2015-09-08 Ian Lance Taylor ++ ++ PR other/67457 ++ * mmap.c (backtrace_alloc): Correct test for mmap failure. ++ ++2015-08-31 Ulrich Weigand ++ ++ * configure.ac: For spu-*-* targets, set have_fcntl to no. ++ * configure: Regenerate. ++ ++2015-08-27 Ulrich Weigand ++ ++ * configure.ac: Remove [disable-shared] argument to LT_INIT. ++ Remove setting PIC_FLAG when building as target library. ++ * configure: Regenerate. ++ ++2015-08-26 Hans-Peter Nilsson ++ ++ * configure.ac: Only compile with -fPIC if the target ++ supports it. ++ * configure: Regenerate. ++ ++2015-08-24 Ulrich Weigand ++ ++ * configure.ac: Set have_mmap to no on spu-*-* targets. ++ * configure: Regenerate. 
++ ++2015-08-13 Ian Lance Taylor ++ ++ * dwarf.c (read_function_entry): Add vec_inlined parameter. ++ Change all callers. ++ ++2015-06-11 Martin Sebor ++ ++ PR sanitizer/65479 ++ * dwarf.c (struct line): Add new field idx. ++ (line_compare): Use it. ++ (add_line): Set it. ++ (read_line_info): Reset it. ++ ++2015-05-29 Tristan Gingold ++ ++ * pecoff.c: New file. ++ * Makefile.am (FORMAT_FILES): Add pecoff.c and dependencies. ++ * Makefile.in: Regenerate. ++ * filetype.awk: Detect pecoff. ++ * configure.ac: Define BACKTRACE_SUPPORTS_DATA on elf platforms. ++ Add pecoff. ++ * btest.c (test5): Test enabled only if BACKTRACE_SUPPORTS_DATA is ++ true. ++ * backtrace-supported.h.in (BACKTRACE_SUPPORTS_DATA): Define. ++ * configure: Regenerate. ++ * pecoff.c: New file. ++ ++2015-05-13 Michael Haubenwallner ++ ++ * Makefile.in: Regenerated with automake-1.11.6. ++ * aclocal.m4: Likewise. ++ * configure: Likewise. ++ ++2015-01-24 Matthias Klose ++ ++ * configure.ac: Move AM_ENABLE_MULTILIB before AC_PROG_CC. ++ * configure: Regenerate. ++ ++2015-01-05 Jakub Jelinek ++ ++ Update copyright years. ++ ++2014-11-21 H.J. Lu ++ ++ PR bootstrap/63784 ++ * configure: Regenerated. ++ ++2014-11-11 David Malcolm ++ ++ * ChangeLog.jit: New. ++ ++2014-11-11 Francois-Xavier Coudert ++ ++ PR target/63610 ++ * configure: Regenerate. ++ ++2014-10-23 Ian Lance Taylor ++ ++ * internal.h (backtrace_atomic_load_pointer) [no atomic or sync]: ++ Fix to return void *. ++ ++2014-05-08 Ian Lance Taylor ++ ++ * mmap.c (backtrace_free): If freeing a large aligned block of ++ memory, call munmap rather than holding onto it. ++ (backtrace_vector_grow): When growing a vector, double the number ++ of pages requested. When releasing the old version of a grown ++ vector, pass the correct size to backtrace_free. ++ ++2014-03-07 Ian Lance Taylor ++ ++ * sort.c (backtrace_qsort): Use middle element as pivot. ++ ++2014-03-06 Ian Lance Taylor ++ ++ * sort.c: New file. ++ * stest.c: New file. 
++ * internal.h (backtrace_qsort): Declare. ++ * dwarf.c (read_abbrevs): Call backtrace_qsort instead of qsort. ++ (read_line_info, read_function_entry): Likewise. ++ (read_function_info, build_dwarf_data): Likewise. ++ * elf.c (elf_initialize_syminfo): Likewise. ++ * Makefile.am (libbacktrace_la_SOURCES): Add sort.c. ++ (stest_SOURCES, stest_LDADD): Define. ++ (check_PROGRAMS): Add stest. ++ ++2014-02-07 Misty De Meo ++ ++ PR target/58710 ++ * configure.ac: Use AC_LINK_IFELSE in check for ++ _Unwind_GetIPInfo. ++ * configure: Regenerate. ++ ++2014-01-02 Richard Sandiford ++ ++ Update copyright years ++ ++2013-12-06 Jakub Jelinek ++ ++ * elf.c (ET_DYN): Undefine and define again. ++ (elf_add): Add exe argument, if true and ehdr.e_type is ET_DYN, ++ return early -1 without closing the descriptor. ++ (struct phdr_data): Add exe_descriptor. ++ (phdr_callback): If pd->exe_descriptor is not -1, for very first ++ call if dlpi_name is NULL just call elf_add with the exe_descriptor, ++ otherwise backtrace_close the exe_descriptor if not -1. Adjust ++ call to elf_add. ++ (backtrace_initialize): Adjust call to elf_add. If it returns ++ -1, set pd.exe_descriptor to descriptor, otherwise set it to -1. ++ ++2013-12-05 Ian Lance Taylor ++ ++ * alloc.c (backtrace_vector_finish): Add error_callback and data ++ parameters. Call backtrace_vector_release. Return address base. ++ * mmap.c (backtrace_vector_finish): Add error_callback and data ++ parameters. Return address base. ++ * dwarf.c (read_function_info): Get new address base from ++ backtrace_vector_finish. ++ * internal.h (backtrace_vector_finish): Update declaration. ++ ++2013-11-27 Ian Lance Taylor ++ ++ * dwarf.c (find_address_ranges): New static function, broken out ++ of build_address_map. ++ (build_address_map): Call it. ++ * btest.c (check): Check for missing filename or function, rather ++ than crashing. ++ (f3): Check that enough frames were returned. 
++ ++2013-11-19 Jakub Jelinek ++ ++ * backtrace.h (backtrace_syminfo_callback): Add symsize argument. ++ * elf.c (elf_syminfo): Pass 0 or sym->size to the callback as ++ last argument. ++ * btest.c (struct symdata): Add size field. ++ (callback_three): Add symsize argument. Copy it to the data->size ++ field. ++ (f23): Set symdata.size to 0. ++ (test5): Likewise. If sizeof (int) > 1, lookup address of ++ ((uintptr_t) &global) + 1. Verify symdata.val and symdata.size ++ values. ++ ++ * atomic.c: Include sys/types.h. ++ ++2013-11-18 Ian Lance Taylor ++ ++ * configure.ac: Check for support of __atomic extensions. ++ * internal.h: Declare or #define atomic functions for use in ++ backtrace code. ++ * atomic.c: New file. ++ * dwarf.c (dwarf_lookup_pc): Use atomic functions. ++ (dwarf_fileline, backtrace_dwarf_add): Likewise. ++ * elf.c (elf_add_syminfo_data, elf_syminfo): Likewise. ++ (backtrace_initialize): Likewise. ++ * fileline.c (fileline_initialize): Likewise. ++ * Makefile.am (libbacktrace_la_SOURCES): Add atomic.c. ++ * configure, config.h.in, Makefile.in: Rebuild. ++ ++2013-11-18 Jakub Jelinek ++ ++ * elf.c (SHN_UNDEF): Define. ++ (elf_initialize_syminfo): Add base_address argument. Ignore symbols ++ with st_shndx == SHN_UNDEF. Add base_address to address fields. ++ (elf_add): Adjust caller. ++ ++ * elf.c (phdr_callback): Process info->dlpi_addr == 0 normally. ++ ++2013-11-16 Ian Lance Taylor ++ ++ * backtrace.h (backtrace_create_state): Correct comment about ++ threading. ++ ++2013-11-15 Ian Lance Taylor ++ ++ * backtrace.h (backtrace_syminfo): Update comment and parameter ++ name to take any address, not just a PC value. ++ * elf.c (STT_OBJECT): Define. ++ (elf_nosyms): Rename parameter pc to addr. ++ (elf_symbol_search): Rename local variable pc to addr. ++ (elf_initialize_syminfo): Add STT_OBJECT symbols to elf_symbols. ++ (elf_syminfo): Rename parameter pc to addr. ++ * btest.c (global): New global variable. ++ (test5): New test. ++ (main): Call test5. 
++ ++2013-10-17 Ian Lance Taylor ++ ++ * elf.c (elf_add): Don't get the wrong offsets if a debug section ++ is missing. ++ ++2013-10-15 David Malcolm ++ ++ * configure.ac: Add --enable-host-shared, setting up ++ pre-existing PIC_FLAG variable within Makefile.am et al. ++ * configure: Regenerate. ++ ++2013-09-20 Alan Modra ++ ++ * configure: Regenerate. ++ ++2013-07-23 Alexander Monakov ++ ++ * elf.c (elf_syminfo): Loop over the elf_syminfo_data chain. ++ ++2013-07-23 Alexander Monakov ++ ++ * elf.c (backtrace_initialize): Pass elf_fileline_fn to ++ dl_iterate_phdr callbacks. ++ ++2013-03-25 Ian Lance Taylor ++ ++ * alloc.c: #include . ++ * mmap.c: Likewise. ++ ++2013-01-31 Ian Lance Taylor ++ ++ * dwarf.c (read_function_info): Permit fvec parameter to be NULL. ++ (dwarf_lookup_pc): Don't use ddata->fvec if threaded. ++ ++2013-01-25 Jakub Jelinek ++ ++ PR other/56076 ++ * dwarf.c (read_line_header): Don't crash if DW_AT_comp_dir ++ attribute was not seen. ++ ++2013-01-16 Ian Lance Taylor ++ ++ * dwarf.c (struct unit): Add filename and abs_filename fields. ++ (build_address_map): Set new fields when reading unit. ++ (dwarf_lookup_pc): If we don't find an entry in the line table, ++ just return the main file name. ++ ++2013-01-14 Richard Sandiford ++ ++ Update copyright years. ++ ++2013-01-01 Ian Lance Taylor ++ ++ PR bootstrap/54834 ++ * Makefile.am (AM_CPPFLAGS): Remove -I ../gcc/include and -I ++ $(MULTIBUILDTOP)/../../gcc/include. ++ * Makefile.in: Rebuild. ++ ++2013-01-01 Ian Lance Taylor ++ ++ PR other/55536 ++ * mmap.c (backtrace_alloc): Don't call sync functions if not ++ threaded. ++ (backtrace_free): Likewise. ++ ++2012-12-12 John David Anglin ++ ++ * mmapio.c: Define MAP_FAILED if not defined. ++ ++2012-12-11 Jakub Jelinek ++ ++ PR bootstrap/54926 ++ * Makefile.am (AM_CFLAGS): Remove -frandom-seed=$@. ++ * configure.ac: If --with-target-subdir, add -frandom-seed=$@ ++ to EXTRA_FLAGS unconditionally, otherwise check whether the compiler ++ accepts it. 
++ * Makefile.in: Regenerated. ++ * configure: Regenerated. ++ ++2012-12-07 Jakub Jelinek ++ ++ PR bootstrap/54926 ++ * Makefile.am (AM_CFLAGS): Add -frandom-seed=$@. ++ * Makefile.in: Regenerated. ++ ++2012-11-20 Ian Lance Taylor ++ ++ * dwarf.c (read_attribute): Always clear val. ++ ++2012-11-13 Ian Lance Taylor ++ ++ PR other/55312 ++ * configure.ac: Only add -Werror if building a target library. ++ * configure: Rebuild. ++ ++2012-11-12 Ian Lance Taylor ++ Rainer Orth ++ Gerald Pfeifer ++ ++ * configure.ac: Check for getexecname. ++ * fileline.c: #include . Define getexecname if not ++ available. ++ (fileline_initialize): Try to find the executable in a few ++ different ways. ++ * print.c (error_callback): Only print the filename if it came ++ from the backtrace state. ++ * configure, config.h.in: Rebuild. ++ ++2012-10-29 Ian Lance Taylor ++ ++ * mmap.c (backtrace_vector_release): Correct last patch: add ++ aligned, not size. ++ ++2012-10-29 Ian Lance Taylor ++ ++ * mmap.c (backtrace_vector_release): Make sure freed block is ++ aligned on 8-byte boundary. ++ ++2012-10-26 Ian Lance Taylor ++ ++ PR other/55087 ++ * posix.c (backtrace_open): Add does_not_exist parameter. ++ * elf.c (phdr_callback): Do not warn if shared library could not ++ be opened. ++ * fileline.c (fileline_initialize): Update calls to ++ backtrace_open. ++ * internal.h (backtrace_open): Update declaration. ++ ++2012-10-26 Jack Howarth ++ ++ PR target/55061 ++ * configure.ac: Check for _Unwind_GetIPInfo function declaration. ++ * configure: Regenerate. ++ ++2012-10-24 Ian Lance Taylor ++ ++ PR target/55061 ++ * configure.ac: Check whether -funwind-tables option works. ++ * configure: Rebuild. ++ ++2012-10-11 Ian Lance Taylor ++ ++ * configure.ac: Do not use dl_iterate_phdr on Solaris 10. ++ * configure: Rebuild. ++ ++2012-10-10 Ian Lance Taylor ++ ++ * elf.c: Rename all Elf typedefs to start with b_elf, and be all ++ lower case. 
++ ++2012-10-10 Hans-Peter Nilsson ++ ++ * elf.c (elf_add_syminfo_data): Add casts to avoid warning. ++ ++2012-10-09 Ian Lance Taylor ++ ++ * dwarf.c (dwarf_fileline): Add cast to avoid warning. ++ (backtrace_dwarf_add): Likewise. ++ ++2012-10-09 Ian Lance Taylor ++ ++ Add support for tracing through shared libraries. ++ * configure.ac: Check for link.h and dl_iterate_phdr. ++ * elf.c: #include if system has dl_iterate_phdr. #undef ++ ELF macros before #defining them. ++ (dl_phdr_info, dl_iterate_phdr): Define if system does not have ++ dl_iterate_phdr. ++ (struct elf_syminfo_data): Add next field. ++ (elf_initialize_syminfo): Initialize next field. ++ (elf_add_syminfo_data): New static function. ++ (elf_add): New static function, broken out of ++ backtrace_initialize. Call backtrace_dwarf_add instead of ++ backtrace_dwarf_initialize. ++ (struct phdr_data): Define. ++ (phdr_callback): New static function. ++ (backtrace_initialize): Call elf_add. ++ * dwarf.c (struct dwarf_data): Add next and base_address fields. ++ (add_unit_addr): Add base_address parameter. Change all callers. ++ (add_unit_ranges, build_address_map): Likewise. ++ (add_line): Add ddata parameter. Change all callers. ++ (read_line_program, add_function_range): Likewise. ++ (dwarf_lookup_pc): New static function, broken out of ++ dwarf_fileline. ++ (dwarf_fileline): Call dwarf_lookup_pc. ++ (build_dwarf_data): New static function. ++ (backtrace_dwarf_add): New function. ++ (backtrace_dwarf_initialize): Remove. ++ * internal.h (backtrace_dwarf_initialize): Don't declare. ++ (backtrace_dwarf_add): Declare. ++ * configure, config.h.in: Rebuild. ++ ++2012-10-04 Gerald Pfeifer ++ ++ * btest.c (f23): Avoid uninitialized variable warning. ++ ++2012-10-04 Ian Lance Taylor ++ ++ * dwarf.c: If the system header files do not declare strnlen, ++ provide our own version. ++ ++2012-10-03 Ian Lance Taylor ++ ++ * dwarf.c (read_uleb128): Fix overflow test. ++ (read_sleb128): Likewise. 
++ (build_address_map): Don't change unit_buf.start. ++ ++2012-10-02 Uros Bizjak ++ ++ PR other/54761 ++ * configure.ac (EXTRA_FLAGS): New. ++ * Makefile.am (AM_FLAGS): Add $(EXTRA_FLAGS). ++ * configure, Makefile.in: Regenerate. ++ ++2012-09-29 Ian Lance Taylor ++ ++ PR other/54749 ++ * fileline.c (fileline_initialize): Pass errnum as -1 when ++ reporting that we could not read executable information after a ++ previous failure. ++ ++2012-09-27 Ian Lance Taylor ++ ++ PR bootstrap/54732 ++ * configure.ac: Add no-dependencies to AM_INIT_AUTOMAKE. ++ * Makefile.am: Add dependencies for all objects. ++ * configure, aclocal.m4, Makefile.in: Rebuild. ++ ++2012-09-27 Ian Lance Taylor ++ ++ PR other/54726 ++ * elf.c (backtrace_initialize): Set *fileln_fn, not ++ state->fileln_fn. ++ ++2012-09-19 Ian Lance Taylor ++ ++ * configure.ac: Only use GCC_CHECK_UNWIND_GETIPINFO when compiled ++ as a target library. ++ * configure: Rebuild. ++ ++2012-09-19 Rainer Orth ++ Ian Lance Taylor ++ ++ * configure.ac (GCC_HEADER_STDINT): Invoke. ++ * backtrace.h: If we can't find , use "gstdint.h". ++ * btest.c: Don't include . ++ * dwarf.c: Likewise. ++ * configure, aclocal.m4, Makefile.in, config.h.in: Rebuild. ++ ++2012-09-18 Ian Lance Taylor ++ ++ PR bootstrap/54623 ++ * Makefile.am (AM_CPPFLAGS): Define. ++ (AM_CFLAGS): Remove -I options. ++ * Makefile.in: Rebuild. ++ ++2012-09-18 Ian Lance Taylor ++ ++ * posix.c (O_BINARY): Define if not defined. ++ (backtrace_open): Pass O_BINARY to open. Only call fcntl if ++ HAVE_FCNTL is defined. ++ * configure.ac: Test for the fcntl function. ++ * configure, config.h.in: Rebuild. ++ ++2012-09-18 Ian Lance Taylor ++ ++ * btest.c (test1, test2, test3, test4): Add the unused attribute. ++ ++2012-09-18 Ian Lance Taylor ++ ++ * dwarf.c: Correct test of HAVE_DECL_STRNLEN. ++ ++2012-09-18 Ian Lance Taylor ++ ++ * configure.ac: Add AC_USE_SYSTEM_EXTENSIONS. ++ * mmapio.c: Don't define _GNU_SOURCE. ++ * configure, config.h.in: Rebuild. 
++ ++2012-09-18 Ian Lance Taylor ++ ++ * configure.ac: Check whether strnlen is declared. ++ * dwarf.c: Declare strnlen if not declared. ++ * configure, config.h.in: Rebuild. ++ ++2012-09-18 Rainer Orth ++ ++ * fileline.c: Include . ++ * mmap.c: Likewise. ++ ++2012-09-17 Ian Lance Taylor ++ ++ PR bootstrap/54611 ++ * nounwind.c (backtrace_full): Rename from backtrace. Add state ++ parameter. ++ ++2012-09-17 Gerald Pfeifer ++ ++ PR bootstrap/54611 ++ * nounwind.c (backtrace_simple): Add state parameter. ++ ++2012-09-17 Ian Lance Taylor ++ ++ PR bootstrap/54609 ++ * unknown.c (unknown_fileline): Add state parameter, remove ++ fileline_data parameter, name error_callback parameter. ++ (backtrace_initialize): Add state parameter. ++ ++2012-09-17 Ian Lance Taylor ++ ++ * Initial implementation. ++ ++Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ ++Copying and distribution of this file, with or without modification, ++are permitted in any medium without royalty provided the copyright ++notice and this notice are preserved. diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/ChangeLog.jit index 000000000,000000000..6b60e3b3b new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/ChangeLog.jit @@@ -1,0 -1,0 +1,14 @@@ ++2014-09-24 David Malcolm ++ ++ * ChangeLog.jit: Add copyright footer. ++ ++2013-10-03 David Malcolm ++ ++ * configure.ac: Add --enable-host-shared. ++ * configure: Regenerate. ++ ++Copyright (C) 2013-2014 Free Software Foundation, Inc. ++ ++Copying and distribution of this file, with or without modification, ++are permitted in any medium without royalty provided the copyright ++notice and this notice are preserved. diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/Makefile.am index 000000000,000000000..61aec4474 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/Makefile.am @@@ -1,0 -1,0 +1,136 @@@ ++# Makefile.am -- Backtrace Makefile. 
++# Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ ++# Redistribution and use in source and binary forms, with or without ++# modification, are permitted provided that the following conditions are ++# met: ++ ++# (1) Redistributions of source code must retain the above copyright ++# notice, this list of conditions and the following disclaimer. ++ ++# (2) Redistributions in binary form must reproduce the above copyright ++# notice, this list of conditions and the following disclaimer in ++# the documentation and/or other materials provided with the ++# distribution. ++ ++# (3) The name of the author may not be used to ++# endorse or promote products derived from this software without ++# specific prior written permission. ++ ++# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++# POSSIBILITY OF SUCH DAMAGE. ++ ++ACLOCAL_AMFLAGS = -I .. 
-I ../config ++ ++AM_CPPFLAGS = -I $(top_srcdir)/../include -I $(top_srcdir)/../libgcc \ ++ -I ../libgcc ++ ++AM_CFLAGS = $(EXTRA_FLAGS) $(WARN_FLAGS) $(PIC_FLAG) ++ ++noinst_LTLIBRARIES = libbacktrace.la ++ ++libbacktrace_la_SOURCES = \ ++ backtrace.h \ ++ atomic.c \ ++ dwarf.c \ ++ fileline.c \ ++ internal.h \ ++ posix.c \ ++ print.c \ ++ sort.c \ ++ state.c ++ ++BACKTRACE_FILES = \ ++ backtrace.c \ ++ simple.c \ ++ nounwind.c ++ ++FORMAT_FILES = \ ++ elf.c \ ++ pecoff.c \ ++ unknown.c ++ ++VIEW_FILES = \ ++ read.c \ ++ mmapio.c ++ ++ALLOC_FILES = \ ++ alloc.c \ ++ mmap.c ++ ++EXTRA_libbacktrace_la_SOURCES = \ ++ $(BACKTRACE_FILES) \ ++ $(FORMAT_FILES) \ ++ $(VIEW_FILES) \ ++ $(ALLOC_FILES) ++ ++libbacktrace_la_LIBADD = \ ++ $(BACKTRACE_FILE) \ ++ $(FORMAT_FILE) \ ++ $(VIEW_FILE) \ ++ $(ALLOC_FILE) ++ ++libbacktrace_la_DEPENDENCIES = $(libbacktrace_la_LIBADD) ++ ++# Testsuite. ++ ++check_PROGRAMS = ++ ++TESTS = $(check_PROGRAMS) ++ ++if NATIVE ++ ++btest_SOURCES = btest.c ++btest_CFLAGS = $(AM_CFLAGS) -g -O ++btest_LDADD = libbacktrace.la ++ ++check_PROGRAMS += btest ++ ++stest_SOURCES = stest.c ++stest_LDADD = libbacktrace.la ++ ++check_PROGRAMS += stest ++ ++endif NATIVE ++ ++# We can't use automake's automatic dependency tracking, because it ++# breaks when using bootstrap-lean. Automatic dependency tracking ++# with GCC bootstrap will cause some of the objects to depend on ++# header files in prev-gcc/include, e.g., stddef.h and stdarg.h. When ++# using bootstrap-lean, prev-gcc is removed after each stage. When ++# running "make install", those header files will be gone, causing the ++# library to be rebuilt at install time. That may not succeed. ++ ++# These manual dependencies do not include dependencies on unwind.h, ++# even though that is part of GCC, because where to find it depends on ++# whether we are being built as a host library or a target library. 
++ ++INCDIR = $(top_srcdir)/../include ++alloc.lo: config.h backtrace.h internal.h ++backtrace.lo: config.h backtrace.h internal.h ++btest.lo: (INCDIR)/filenames.h backtrace.h backtrace-supported.h ++dwarf.lo: config.h $(INCDIR)/dwarf2.h $(INCDIR)/dwarf2.def \ ++ $(INCDIR)/filenames.h backtrace.h internal.h ++elf.lo: config.h backtrace.h internal.h ++fileline.lo: config.h backtrace.h internal.h ++mmap.lo: config.h backtrace.h internal.h ++mmapio.lo: config.h backtrace.h internal.h ++nounwind.lo: config.h internal.h ++pecoff.lo: config.h backtrace.h internal.h ++posix.lo: config.h backtrace.h internal.h ++print.lo: config.h backtrace.h internal.h ++read.lo: config.h backtrace.h internal.h ++simple.lo: config.h backtrace.h internal.h ++sort.lo: config.h backtrace.h internal.h ++stest.lo: config.h backtrace.h internal.h ++state.lo: config.h backtrace.h backtrace-supported.h internal.h ++unknown.lo: config.h backtrace.h internal.h diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/Makefile.in index 000000000,000000000..de74b5d09 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/Makefile.in @@@ -1,0 -1,0 +1,770 @@@ ++# Makefile.in generated by automake 1.11.6 from Makefile.am. ++# @configure_input@ ++ ++# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, ++# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software ++# Foundation, Inc. ++# This Makefile.in is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# This program is distributed in the hope that it will be useful, ++# but WITHOUT ANY WARRANTY, to the extent permitted by law; without ++# even the implied warranty of MERCHANTABILITY or FITNESS FOR A ++# PARTICULAR PURPOSE. ++ ++@SET_MAKE@ ++ ++# Makefile.am -- Backtrace Makefile. ++# Copyright (C) 2012-2016 Free Software Foundation, Inc. 
++ ++# Redistribution and use in source and binary forms, with or without ++# modification, are permitted provided that the following conditions are ++# met: ++ ++# (1) Redistributions of source code must retain the above copyright ++# notice, this list of conditions and the following disclaimer. ++ ++# (2) Redistributions in binary form must reproduce the above copyright ++# notice, this list of conditions and the following disclaimer in ++# the documentation and/or other materials provided with the ++# distribution. ++ ++# (3) The name of the author may not be used to ++# endorse or promote products derived from this software without ++# specific prior written permission. ++ ++# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++# POSSIBILITY OF SUCH DAMAGE. 
++ ++VPATH = @srcdir@ ++am__make_dryrun = \ ++ { \ ++ am__dry=no; \ ++ case $$MAKEFLAGS in \ ++ *\\[\ \ ]*) \ ++ echo 'am--echo: ; @echo "AM" OK' | $(MAKE) -f - 2>/dev/null \ ++ | grep '^AM OK$$' >/dev/null || am__dry=yes;; \ ++ *) \ ++ for am__flg in $$MAKEFLAGS; do \ ++ case $$am__flg in \ ++ *=*|--*) ;; \ ++ *n*) am__dry=yes; break;; \ ++ esac; \ ++ done;; \ ++ esac; \ ++ test $$am__dry = yes; \ ++ } ++pkgdatadir = $(datadir)/@PACKAGE@ ++pkgincludedir = $(includedir)/@PACKAGE@ ++pkglibdir = $(libdir)/@PACKAGE@ ++pkglibexecdir = $(libexecdir)/@PACKAGE@ ++am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd ++install_sh_DATA = $(install_sh) -c -m 644 ++install_sh_PROGRAM = $(install_sh) -c ++install_sh_SCRIPT = $(install_sh) -c ++INSTALL_HEADER = $(INSTALL_DATA) ++transform = $(program_transform_name) ++NORMAL_INSTALL = : ++PRE_INSTALL = : ++POST_INSTALL = : ++NORMAL_UNINSTALL = : ++PRE_UNINSTALL = : ++POST_UNINSTALL = : ++build_triplet = @build@ ++host_triplet = @host@ ++target_triplet = @target@ ++check_PROGRAMS = $(am__EXEEXT_1) ++@NATIVE_TRUE@am__append_1 = btest stest ++subdir = . 
++DIST_COMMON = README ChangeLog $(srcdir)/Makefile.in \ ++ $(srcdir)/Makefile.am $(top_srcdir)/configure \ ++ $(am__configure_deps) $(srcdir)/config.h.in \ ++ $(srcdir)/../mkinstalldirs $(srcdir)/backtrace-supported.h.in ++ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 ++am__aclocal_m4_deps = $(top_srcdir)/../config/lead-dot.m4 \ ++ $(top_srcdir)/../config/multi.m4 \ ++ $(top_srcdir)/../config/override.m4 \ ++ $(top_srcdir)/../config/stdint.m4 \ ++ $(top_srcdir)/../config/unwind_ipinfo.m4 \ ++ $(top_srcdir)/../config/warnings.m4 \ ++ $(top_srcdir)/../libtool.m4 $(top_srcdir)/../ltoptions.m4 \ ++ $(top_srcdir)/../ltsugar.m4 $(top_srcdir)/../ltversion.m4 \ ++ $(top_srcdir)/../lt~obsolete.m4 $(top_srcdir)/configure.ac ++am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ ++ $(ACLOCAL_M4) ++am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ ++ configure.lineno config.status.lineno ++mkinstalldirs = $(SHELL) $(top_srcdir)/../mkinstalldirs ++CONFIG_HEADER = config.h ++CONFIG_CLEAN_FILES = backtrace-supported.h ++CONFIG_CLEAN_VPATH_FILES = ++LTLIBRARIES = $(noinst_LTLIBRARIES) ++am__DEPENDENCIES_1 = ++am_libbacktrace_la_OBJECTS = atomic.lo dwarf.lo fileline.lo posix.lo \ ++ print.lo sort.lo state.lo ++libbacktrace_la_OBJECTS = $(am_libbacktrace_la_OBJECTS) ++@NATIVE_TRUE@am__EXEEXT_1 = btest$(EXEEXT) stest$(EXEEXT) ++@NATIVE_TRUE@am_btest_OBJECTS = btest-btest.$(OBJEXT) ++btest_OBJECTS = $(am_btest_OBJECTS) ++@NATIVE_TRUE@btest_DEPENDENCIES = libbacktrace.la ++btest_LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ ++ --mode=link $(CCLD) $(btest_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ ++ $(LDFLAGS) -o $@ ++@NATIVE_TRUE@am_stest_OBJECTS = stest.$(OBJEXT) ++stest_OBJECTS = $(am_stest_OBJECTS) ++@NATIVE_TRUE@stest_DEPENDENCIES = libbacktrace.la ++DEFAULT_INCLUDES = -I.@am__isrc@ ++depcomp = ++am__depfiles_maybe = ++COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ ++ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) ++LTCOMPILE = 
$(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ ++ --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ ++ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) ++CCLD = $(CC) ++LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ ++ --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ ++ $(LDFLAGS) -o $@ ++SOURCES = $(libbacktrace_la_SOURCES) $(EXTRA_libbacktrace_la_SOURCES) \ ++ $(btest_SOURCES) $(stest_SOURCES) ++MULTISRCTOP = ++MULTIBUILDTOP = ++MULTIDIRS = ++MULTISUBDIR = ++MULTIDO = true ++MULTICLEAN = true ++am__can_run_installinfo = \ ++ case $$AM_UPDATE_INFO_DIR in \ ++ n|no|NO) false;; \ ++ *) (install-info --version) >/dev/null 2>&1;; \ ++ esac ++ETAGS = etags ++CTAGS = ctags ++am__tty_colors = \ ++red=; grn=; lgn=; blu=; std= ++ACLOCAL = @ACLOCAL@ ++ALLOC_FILE = @ALLOC_FILE@ ++AMTAR = @AMTAR@ ++AR = @AR@ ++AUTOCONF = @AUTOCONF@ ++AUTOHEADER = @AUTOHEADER@ ++AUTOMAKE = @AUTOMAKE@ ++AWK = @AWK@ ++BACKTRACE_FILE = @BACKTRACE_FILE@ ++BACKTRACE_SUPPORTED = @BACKTRACE_SUPPORTED@ ++BACKTRACE_SUPPORTS_DATA = @BACKTRACE_SUPPORTS_DATA@ ++BACKTRACE_SUPPORTS_THREADS = @BACKTRACE_SUPPORTS_THREADS@ ++BACKTRACE_USES_MALLOC = @BACKTRACE_USES_MALLOC@ ++CC = @CC@ ++CFLAGS = @CFLAGS@ ++CPP = @CPP@ ++CPPFLAGS = @CPPFLAGS@ ++CYGPATH_W = @CYGPATH_W@ ++DEFS = @DEFS@ ++DSYMUTIL = @DSYMUTIL@ ++DUMPBIN = @DUMPBIN@ ++ECHO_C = @ECHO_C@ ++ECHO_N = @ECHO_N@ ++ECHO_T = @ECHO_T@ ++EGREP = @EGREP@ ++EXEEXT = @EXEEXT@ ++EXTRA_FLAGS = @EXTRA_FLAGS@ ++FGREP = @FGREP@ ++FORMAT_FILE = @FORMAT_FILE@ ++GREP = @GREP@ ++INSTALL = @INSTALL@ ++INSTALL_DATA = @INSTALL_DATA@ ++INSTALL_PROGRAM = @INSTALL_PROGRAM@ ++INSTALL_SCRIPT = @INSTALL_SCRIPT@ ++INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ ++LD = @LD@ ++LDFLAGS = @LDFLAGS@ ++LIBOBJS = @LIBOBJS@ ++LIBS = @LIBS@ ++LIBTOOL = @LIBTOOL@ ++LIPO = @LIPO@ ++LN_S = @LN_S@ ++LTLIBOBJS = @LTLIBOBJS@ ++MAINT = @MAINT@ ++MAKEINFO = @MAKEINFO@ ++MKDIR_P = @MKDIR_P@ ++NM = @NM@ ++NMEDIT = @NMEDIT@ ++OBJDUMP = @OBJDUMP@ 
++OBJEXT = @OBJEXT@ ++OTOOL = @OTOOL@ ++OTOOL64 = @OTOOL64@ ++PACKAGE = @PACKAGE@ ++PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ ++PACKAGE_NAME = @PACKAGE_NAME@ ++PACKAGE_STRING = @PACKAGE_STRING@ ++PACKAGE_TARNAME = @PACKAGE_TARNAME@ ++PACKAGE_URL = @PACKAGE_URL@ ++PACKAGE_VERSION = @PACKAGE_VERSION@ ++PATH_SEPARATOR = @PATH_SEPARATOR@ ++PIC_FLAG = @PIC_FLAG@ ++RANLIB = @RANLIB@ ++SED = @SED@ ++SET_MAKE = @SET_MAKE@ ++SHELL = @SHELL@ ++STRIP = @STRIP@ ++VERSION = @VERSION@ ++VIEW_FILE = @VIEW_FILE@ ++WARN_FLAGS = @WARN_FLAGS@ ++abs_builddir = @abs_builddir@ ++abs_srcdir = @abs_srcdir@ ++abs_top_builddir = @abs_top_builddir@ ++abs_top_srcdir = @abs_top_srcdir@ ++ac_ct_CC = @ac_ct_CC@ ++ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ ++am__leading_dot = @am__leading_dot@ ++am__tar = @am__tar@ ++am__untar = @am__untar@ ++bindir = @bindir@ ++build = @build@ ++build_alias = @build_alias@ ++build_cpu = @build_cpu@ ++build_os = @build_os@ ++build_vendor = @build_vendor@ ++builddir = @builddir@ ++datadir = @datadir@ ++datarootdir = @datarootdir@ ++docdir = @docdir@ ++dvidir = @dvidir@ ++exec_prefix = @exec_prefix@ ++host = @host@ ++host_alias = @host_alias@ ++host_cpu = @host_cpu@ ++host_os = @host_os@ ++host_vendor = @host_vendor@ ++htmldir = @htmldir@ ++includedir = @includedir@ ++infodir = @infodir@ ++install_sh = @install_sh@ ++libdir = @libdir@ ++libexecdir = @libexecdir@ ++libtool_VERSION = @libtool_VERSION@ ++localedir = @localedir@ ++localstatedir = @localstatedir@ ++mandir = @mandir@ ++mkdir_p = @mkdir_p@ ++multi_basedir = @multi_basedir@ ++oldincludedir = @oldincludedir@ ++pdfdir = @pdfdir@ ++prefix = @prefix@ ++program_transform_name = @program_transform_name@ ++psdir = @psdir@ ++sbindir = @sbindir@ ++sharedstatedir = @sharedstatedir@ ++srcdir = @srcdir@ ++sysconfdir = @sysconfdir@ ++target = @target@ ++target_alias = @target_alias@ ++target_cpu = @target_cpu@ ++target_os = @target_os@ ++target_vendor = @target_vendor@ ++top_build_prefix = @top_build_prefix@ ++top_builddir = 
@top_builddir@ ++top_srcdir = @top_srcdir@ ++ACLOCAL_AMFLAGS = -I .. -I ../config ++AM_CPPFLAGS = -I $(top_srcdir)/../include -I $(top_srcdir)/../libgcc \ ++ -I ../libgcc ++ ++AM_CFLAGS = $(EXTRA_FLAGS) $(WARN_FLAGS) $(PIC_FLAG) ++noinst_LTLIBRARIES = libbacktrace.la ++libbacktrace_la_SOURCES = \ ++ backtrace.h \ ++ atomic.c \ ++ dwarf.c \ ++ fileline.c \ ++ internal.h \ ++ posix.c \ ++ print.c \ ++ sort.c \ ++ state.c ++ ++BACKTRACE_FILES = \ ++ backtrace.c \ ++ simple.c \ ++ nounwind.c ++ ++FORMAT_FILES = \ ++ elf.c \ ++ pecoff.c \ ++ unknown.c ++ ++VIEW_FILES = \ ++ read.c \ ++ mmapio.c ++ ++ALLOC_FILES = \ ++ alloc.c \ ++ mmap.c ++ ++EXTRA_libbacktrace_la_SOURCES = \ ++ $(BACKTRACE_FILES) \ ++ $(FORMAT_FILES) \ ++ $(VIEW_FILES) \ ++ $(ALLOC_FILES) ++ ++libbacktrace_la_LIBADD = \ ++ $(BACKTRACE_FILE) \ ++ $(FORMAT_FILE) \ ++ $(VIEW_FILE) \ ++ $(ALLOC_FILE) ++ ++libbacktrace_la_DEPENDENCIES = $(libbacktrace_la_LIBADD) ++TESTS = $(check_PROGRAMS) ++@NATIVE_TRUE@btest_SOURCES = btest.c ++@NATIVE_TRUE@btest_CFLAGS = $(AM_CFLAGS) -g -O ++@NATIVE_TRUE@btest_LDADD = libbacktrace.la ++@NATIVE_TRUE@stest_SOURCES = stest.c ++@NATIVE_TRUE@stest_LDADD = libbacktrace.la ++ ++# We can't use automake's automatic dependency tracking, because it ++# breaks when using bootstrap-lean. Automatic dependency tracking ++# with GCC bootstrap will cause some of the objects to depend on ++# header files in prev-gcc/include, e.g., stddef.h and stdarg.h. When ++# using bootstrap-lean, prev-gcc is removed after each stage. When ++# running "make install", those header files will be gone, causing the ++# library to be rebuilt at install time. That may not succeed. ++ ++# These manual dependencies do not include dependencies on unwind.h, ++# even though that is part of GCC, because where to find it depends on ++# whether we are being built as a host library or a target library. 
++INCDIR = $(top_srcdir)/../include ++all: config.h ++ $(MAKE) $(AM_MAKEFLAGS) all-am ++ ++.SUFFIXES: ++.SUFFIXES: .c .lo .o .obj ++am--refresh: Makefile ++ @: ++$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(am__configure_deps) ++ @for dep in $?; do \ ++ case '$(am__configure_deps)' in \ ++ *$$dep*) \ ++ echo ' cd $(srcdir) && $(AUTOMAKE) --foreign --ignore-deps'; \ ++ $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign --ignore-deps \ ++ && exit 0; \ ++ exit 1;; \ ++ esac; \ ++ done; \ ++ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign --ignore-deps Makefile'; \ ++ $(am__cd) $(top_srcdir) && \ ++ $(AUTOMAKE) --foreign --ignore-deps Makefile ++.PRECIOUS: Makefile ++Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status ++ @case '$?' in \ ++ *config.status*) \ ++ echo ' $(SHELL) ./config.status'; \ ++ $(SHELL) ./config.status;; \ ++ *) \ ++ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ ++ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ ++ esac; ++ ++$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) ++ $(SHELL) ./config.status --recheck ++ ++$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) ++ $(am__cd) $(srcdir) && $(AUTOCONF) ++$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps) ++ $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) ++$(am__aclocal_m4_deps): ++ ++config.h: stamp-h1 ++ @if test ! -f $@; then rm -f stamp-h1; else :; fi ++ @if test ! 
-f $@; then $(MAKE) $(AM_MAKEFLAGS) stamp-h1; else :; fi ++ ++stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status ++ @rm -f stamp-h1 ++ cd $(top_builddir) && $(SHELL) ./config.status config.h ++$(srcdir)/config.h.in: @MAINTAINER_MODE_TRUE@ $(am__configure_deps) ++ ($(am__cd) $(top_srcdir) && $(AUTOHEADER)) ++ rm -f stamp-h1 ++ touch $@ ++ ++distclean-hdr: ++ -rm -f config.h stamp-h1 ++backtrace-supported.h: $(top_builddir)/config.status $(srcdir)/backtrace-supported.h.in ++ cd $(top_builddir) && $(SHELL) ./config.status $@ ++ ++clean-noinstLTLIBRARIES: ++ -test -z "$(noinst_LTLIBRARIES)" || rm -f $(noinst_LTLIBRARIES) ++ @list='$(noinst_LTLIBRARIES)'; for p in $$list; do \ ++ dir="`echo $$p | sed -e 's|/[^/]*$$||'`"; \ ++ test "$$dir" != "$$p" || dir=.; \ ++ echo "rm -f \"$${dir}/so_locations\""; \ ++ rm -f "$${dir}/so_locations"; \ ++ done ++libbacktrace.la: $(libbacktrace_la_OBJECTS) $(libbacktrace_la_DEPENDENCIES) $(EXTRA_libbacktrace_la_DEPENDENCIES) ++ $(LINK) $(libbacktrace_la_OBJECTS) $(libbacktrace_la_LIBADD) $(LIBS) ++ ++clean-checkPROGRAMS: ++ @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \ ++ echo " rm -f" $$list; \ ++ rm -f $$list || exit $$?; \ ++ test -n "$(EXEEXT)" || exit 0; \ ++ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ ++ echo " rm -f" $$list; \ ++ rm -f $$list ++btest$(EXEEXT): $(btest_OBJECTS) $(btest_DEPENDENCIES) $(EXTRA_btest_DEPENDENCIES) ++ @rm -f btest$(EXEEXT) ++ $(btest_LINK) $(btest_OBJECTS) $(btest_LDADD) $(LIBS) ++stest$(EXEEXT): $(stest_OBJECTS) $(stest_DEPENDENCIES) $(EXTRA_stest_DEPENDENCIES) ++ @rm -f stest$(EXEEXT) ++ $(LINK) $(stest_OBJECTS) $(stest_LDADD) $(LIBS) ++ ++mostlyclean-compile: ++ -rm -f *.$(OBJEXT) ++ ++distclean-compile: ++ -rm -f *.tab.c ++ ++.c.o: ++ $(COMPILE) -c $< ++ ++.c.obj: ++ $(COMPILE) -c `$(CYGPATH_W) '$<'` ++ ++.c.lo: ++ $(LTCOMPILE) -c -o $@ $< ++ ++btest-btest.o: btest.c ++ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) 
$(btest_CFLAGS) $(CFLAGS) -c -o btest-btest.o `test -f 'btest.c' || echo '$(srcdir)/'`btest.c ++ ++btest-btest.obj: btest.c ++ $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(btest_CFLAGS) $(CFLAGS) -c -o btest-btest.obj `if test -f 'btest.c'; then $(CYGPATH_W) 'btest.c'; else $(CYGPATH_W) '$(srcdir)/btest.c'; fi` ++ ++mostlyclean-libtool: ++ -rm -f *.lo ++ ++clean-libtool: ++ -rm -rf .libs _libs ++ ++distclean-libtool: ++ -rm -f libtool config.lt ++ ++# GNU Make needs to see an explicit $(MAKE) variable in the command it ++# runs to enable its job server during parallel builds. Hence the ++# comments below. ++all-multi: ++ $(MULTIDO) $(AM_MAKEFLAGS) DO=all multi-do # $(MAKE) ++install-multi: ++ $(MULTIDO) $(AM_MAKEFLAGS) DO=install multi-do # $(MAKE) ++ ++mostlyclean-multi: ++ $(MULTICLEAN) $(AM_MAKEFLAGS) DO=mostlyclean multi-clean # $(MAKE) ++clean-multi: ++ $(MULTICLEAN) $(AM_MAKEFLAGS) DO=clean multi-clean # $(MAKE) ++distclean-multi: ++ $(MULTICLEAN) $(AM_MAKEFLAGS) DO=distclean multi-clean # $(MAKE) ++maintainer-clean-multi: ++ $(MULTICLEAN) $(AM_MAKEFLAGS) DO=maintainer-clean multi-clean # $(MAKE) ++ ++ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) ++ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ ++ unique=`for i in $$list; do \ ++ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ ++ done | \ ++ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ ++ END { if (nonempty) { for (i in files) print i; }; }'`; \ ++ mkid -fID $$unique ++tags: TAGS ++ ++TAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ ++ $(TAGS_FILES) $(LISP) ++ set x; \ ++ here=`pwd`; \ ++ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ ++ unique=`for i in $$list; do \ ++ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ ++ done | \ ++ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ ++ END { if (nonempty) { for (i in files) print i; }; }'`; \ ++ shift; \ ++ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ ++ 
test -n "$$unique" || unique=$$empty_fix; \ ++ if test $$# -gt 0; then \ ++ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ ++ "$$@" $$unique; \ ++ else \ ++ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ ++ $$unique; \ ++ fi; \ ++ fi ++ctags: CTAGS ++CTAGS: $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ ++ $(TAGS_FILES) $(LISP) ++ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ ++ unique=`for i in $$list; do \ ++ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ ++ done | \ ++ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ ++ END { if (nonempty) { for (i in files) print i; }; }'`; \ ++ test -z "$(CTAGS_ARGS)$$unique" \ ++ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ ++ $$unique ++ ++GTAGS: ++ here=`$(am__cd) $(top_builddir) && pwd` \ ++ && $(am__cd) $(top_srcdir) \ ++ && gtags -i $(GTAGS_ARGS) "$$here" ++ ++distclean-tags: ++ -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags ++ ++check-TESTS: $(TESTS) ++ @failed=0; all=0; xfail=0; xpass=0; skip=0; \ ++ srcdir=$(srcdir); export srcdir; \ ++ list=' $(TESTS) '; \ ++ $(am__tty_colors); \ ++ if test -n "$$list"; then \ ++ for tst in $$list; do \ ++ if test -f ./$$tst; then dir=./; \ ++ elif test -f $$tst; then dir=; \ ++ else dir="$(srcdir)/"; fi; \ ++ if $(TESTS_ENVIRONMENT) $${dir}$$tst; then \ ++ all=`expr $$all + 1`; \ ++ case " $(XFAIL_TESTS) " in \ ++ *[\ \ ]$$tst[\ \ ]*) \ ++ xpass=`expr $$xpass + 1`; \ ++ failed=`expr $$failed + 1`; \ ++ col=$$red; res=XPASS; \ ++ ;; \ ++ *) \ ++ col=$$grn; res=PASS; \ ++ ;; \ ++ esac; \ ++ elif test $$? 
-ne 77; then \ ++ all=`expr $$all + 1`; \ ++ case " $(XFAIL_TESTS) " in \ ++ *[\ \ ]$$tst[\ \ ]*) \ ++ xfail=`expr $$xfail + 1`; \ ++ col=$$lgn; res=XFAIL; \ ++ ;; \ ++ *) \ ++ failed=`expr $$failed + 1`; \ ++ col=$$red; res=FAIL; \ ++ ;; \ ++ esac; \ ++ else \ ++ skip=`expr $$skip + 1`; \ ++ col=$$blu; res=SKIP; \ ++ fi; \ ++ echo "$${col}$$res$${std}: $$tst"; \ ++ done; \ ++ if test "$$all" -eq 1; then \ ++ tests="test"; \ ++ All=""; \ ++ else \ ++ tests="tests"; \ ++ All="All "; \ ++ fi; \ ++ if test "$$failed" -eq 0; then \ ++ if test "$$xfail" -eq 0; then \ ++ banner="$$All$$all $$tests passed"; \ ++ else \ ++ if test "$$xfail" -eq 1; then failures=failure; else failures=failures; fi; \ ++ banner="$$All$$all $$tests behaved as expected ($$xfail expected $$failures)"; \ ++ fi; \ ++ else \ ++ if test "$$xpass" -eq 0; then \ ++ banner="$$failed of $$all $$tests failed"; \ ++ else \ ++ if test "$$xpass" -eq 1; then passes=pass; else passes=passes; fi; \ ++ banner="$$failed of $$all $$tests did not behave as expected ($$xpass unexpected $$passes)"; \ ++ fi; \ ++ fi; \ ++ dashes="$$banner"; \ ++ skipped=""; \ ++ if test "$$skip" -ne 0; then \ ++ if test "$$skip" -eq 1; then \ ++ skipped="($$skip test was not run)"; \ ++ else \ ++ skipped="($$skip tests were not run)"; \ ++ fi; \ ++ test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \ ++ dashes="$$skipped"; \ ++ fi; \ ++ report=""; \ ++ if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \ ++ report="Please report to $(PACKAGE_BUGREPORT)"; \ ++ test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \ ++ dashes="$$report"; \ ++ fi; \ ++ dashes=`echo "$$dashes" | sed s/./=/g`; \ ++ if test "$$failed" -eq 0; then \ ++ col="$$grn"; \ ++ else \ ++ col="$$red"; \ ++ fi; \ ++ echo "$${col}$$dashes$${std}"; \ ++ echo "$${col}$$banner$${std}"; \ ++ test -z "$$skipped" || echo "$${col}$$skipped$${std}"; \ ++ test -z "$$report" || echo "$${col}$$report$${std}"; \ ++ echo 
"$${col}$$dashes$${std}"; \ ++ test "$$failed" -eq 0; \ ++ else :; fi ++check-am: all-am ++ $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS) ++ $(MAKE) $(AM_MAKEFLAGS) check-TESTS ++check: check-am ++all-am: Makefile $(LTLIBRARIES) all-multi config.h ++installdirs: ++install: install-am ++install-exec: install-exec-am ++install-data: install-data-am ++uninstall: uninstall-am ++ ++install-am: all-am ++ @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am ++ ++installcheck: installcheck-am ++install-strip: ++ if test -z '$(STRIP)'; then \ ++ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ ++ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ ++ install; \ ++ else \ ++ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ ++ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ ++ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ ++ fi ++mostlyclean-generic: ++ ++clean-generic: ++ ++distclean-generic: ++ -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) ++ -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) ++ ++maintainer-clean-generic: ++ @echo "This command is intended for maintainers to use" ++ @echo "it deletes files that may require special tools to rebuild." 
++clean: clean-am clean-multi ++ ++clean-am: clean-checkPROGRAMS clean-generic clean-libtool \ ++ clean-noinstLTLIBRARIES mostlyclean-am ++ ++distclean: distclean-am distclean-multi ++ -rm -f $(am__CONFIG_DISTCLEAN_FILES) ++ -rm -f Makefile ++distclean-am: clean-am distclean-compile distclean-generic \ ++ distclean-hdr distclean-libtool distclean-tags ++ ++dvi: dvi-am ++ ++dvi-am: ++ ++html: html-am ++ ++html-am: ++ ++info: info-am ++ ++info-am: ++ ++install-data-am: ++ ++install-dvi: install-dvi-am ++ ++install-dvi-am: ++ ++install-exec-am: install-multi ++ ++install-html: install-html-am ++ ++install-html-am: ++ ++install-info: install-info-am ++ ++install-info-am: ++ ++install-man: ++ ++install-pdf: install-pdf-am ++ ++install-pdf-am: ++ ++install-ps: install-ps-am ++ ++install-ps-am: ++ ++installcheck-am: ++ ++maintainer-clean: maintainer-clean-am maintainer-clean-multi ++ -rm -f $(am__CONFIG_DISTCLEAN_FILES) ++ -rm -rf $(top_srcdir)/autom4te.cache ++ -rm -f Makefile ++maintainer-clean-am: distclean-am maintainer-clean-generic ++ ++mostlyclean: mostlyclean-am mostlyclean-multi ++ ++mostlyclean-am: mostlyclean-compile mostlyclean-generic \ ++ mostlyclean-libtool ++ ++pdf: pdf-am ++ ++pdf-am: ++ ++ps: ps-am ++ ++ps-am: ++ ++uninstall-am: ++ ++.MAKE: all all-multi check-am clean-multi distclean-multi install-am \ ++ install-multi install-strip maintainer-clean-multi \ ++ mostlyclean-multi ++ ++.PHONY: CTAGS GTAGS all all-am all-multi am--refresh check check-TESTS \ ++ check-am clean clean-checkPROGRAMS clean-generic clean-libtool \ ++ clean-multi clean-noinstLTLIBRARIES ctags distclean \ ++ distclean-compile distclean-generic distclean-hdr \ ++ distclean-libtool distclean-multi distclean-tags dvi dvi-am \ ++ html html-am info info-am install install-am install-data \ ++ install-data-am install-dvi install-dvi-am install-exec \ ++ install-exec-am install-html install-html-am install-info \ ++ install-info-am install-man install-multi install-pdf \ ++ install-pdf-am 
install-ps install-ps-am install-strip \ ++ installcheck installcheck-am installdirs maintainer-clean \ ++ maintainer-clean-generic maintainer-clean-multi mostlyclean \ ++ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ ++ mostlyclean-multi pdf pdf-am ps ps-am tags uninstall \ ++ uninstall-am ++ ++alloc.lo: config.h backtrace.h internal.h ++backtrace.lo: config.h backtrace.h internal.h ++btest.lo: (INCDIR)/filenames.h backtrace.h backtrace-supported.h ++dwarf.lo: config.h $(INCDIR)/dwarf2.h $(INCDIR)/dwarf2.def \ ++ $(INCDIR)/filenames.h backtrace.h internal.h ++elf.lo: config.h backtrace.h internal.h ++fileline.lo: config.h backtrace.h internal.h ++mmap.lo: config.h backtrace.h internal.h ++mmapio.lo: config.h backtrace.h internal.h ++nounwind.lo: config.h internal.h ++pecoff.lo: config.h backtrace.h internal.h ++posix.lo: config.h backtrace.h internal.h ++print.lo: config.h backtrace.h internal.h ++read.lo: config.h backtrace.h internal.h ++simple.lo: config.h backtrace.h internal.h ++sort.lo: config.h backtrace.h internal.h ++stest.lo: config.h backtrace.h internal.h ++state.lo: config.h backtrace.h backtrace-supported.h internal.h ++unknown.lo: config.h backtrace.h internal.h ++ ++# Tell versions [3.59,3.63) of GNU make to not export all variables. ++# Otherwise a system limit (for SysV at least) may be exceeded. ++.NOEXPORT: diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/README index 000000000,000000000..e8b225745 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/README @@@ -1,0 -1,0 +1,23 @@@ ++The libbacktrace library ++Initially written by Ian Lance Taylor ++ ++The libbacktrace library may be linked into a program or library and ++used to produce symbolic backtraces. Sample uses would be to print a ++detailed backtrace when an error occurs or to gather detailed ++profiling information. ++ ++The libbacktrace library is provided under a BSD license. See the ++source files for the exact license text. 
++ ++The public functions are declared and documented in the header file ++backtrace.h, which should be #include'd by a user of the library. ++ ++Building libbacktrace will generate a file backtrace-supported.h, ++which a user of the library may use to determine whether backtraces ++will work. See the source file backtrace-supported.h.in for the ++macros that it defines. ++ ++As of September 2012, libbacktrace only supports ELF executables with ++DWARF debugging information. The library is written to make it ++straightforward to add support for other object file and debugging ++formats. diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/aclocal.m4 index 000000000,000000000..8e84ddd1f new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/aclocal.m4 @@@ -1,0 -1,0 +1,683 @@@ ++# generated automatically by aclocal 1.11.6 -*- Autoconf -*- ++ ++# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, ++# 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, ++# Inc. ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# This program is distributed in the hope that it will be useful, ++# but WITHOUT ANY WARRANTY, to the extent permitted by law; without ++# even the implied warranty of MERCHANTABILITY or FITNESS FOR A ++# PARTICULAR PURPOSE. ++ ++m4_ifndef([AC_AUTOCONF_VERSION], ++ [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl ++m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.64],, ++[m4_warning([this file was generated for autoconf 2.64. ++You have another version of autoconf. It may work, but is not guaranteed to. ++If you have problems, you may need to regenerate the build system entirely. 
++To do so, use the procedure documented by the package, typically `autoreconf'.])]) ++ ++# Copyright (C) 2002, 2003, 2005, 2006, 2007, 2008, 2011 Free Software ++# Foundation, Inc. ++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 1 ++ ++# AM_AUTOMAKE_VERSION(VERSION) ++# ---------------------------- ++# Automake X.Y traces this macro to ensure aclocal.m4 has been ++# generated from the m4 files accompanying Automake X.Y. ++# (This private macro should not be called outside this file.) ++AC_DEFUN([AM_AUTOMAKE_VERSION], ++[am__api_version='1.11' ++dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to ++dnl require some minimum version. Point them to the right macro. ++m4_if([$1], [1.11.6], [], ++ [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl ++]) ++ ++# _AM_AUTOCONF_VERSION(VERSION) ++# ----------------------------- ++# aclocal traces this macro to find the Autoconf version. ++# This is a private macro too. Using m4_define simplifies ++# the logic in aclocal, which can simply ignore this definition. ++m4_define([_AM_AUTOCONF_VERSION], []) ++ ++# AM_SET_CURRENT_AUTOMAKE_VERSION ++# ------------------------------- ++# Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. ++# This function is AC_REQUIREd by AM_INIT_AUTOMAKE. ++AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], ++[AM_AUTOMAKE_VERSION([1.11.6])dnl ++m4_ifndef([AC_AUTOCONF_VERSION], ++ [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl ++_AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) ++ ++# AM_AUX_DIR_EXPAND -*- Autoconf -*- ++ ++# Copyright (C) 2001, 2003, 2005, 2011 Free Software Foundation, Inc. 
++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 1 ++ ++# For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets ++# $ac_aux_dir to `$srcdir/foo'. In other projects, it is set to ++# `$srcdir', `$srcdir/..', or `$srcdir/../..'. ++# ++# Of course, Automake must honor this variable whenever it calls a ++# tool from the auxiliary directory. The problem is that $srcdir (and ++# therefore $ac_aux_dir as well) can be either absolute or relative, ++# depending on how configure is run. This is pretty annoying, since ++# it makes $ac_aux_dir quite unusable in subdirectories: in the top ++# source directory, any form will work fine, but in subdirectories a ++# relative path needs to be adjusted first. ++# ++# $ac_aux_dir/missing ++# fails when called from a subdirectory if $ac_aux_dir is relative ++# $top_srcdir/$ac_aux_dir/missing ++# fails if $ac_aux_dir is absolute, ++# fails when called from a subdirectory in a VPATH build with ++# a relative $ac_aux_dir ++# ++# The reason of the latter failure is that $top_srcdir and $ac_aux_dir ++# are both prefixed by $srcdir. In an in-source build this is usually ++# harmless because $srcdir is `.', but things will broke when you ++# start a VPATH build or use an absolute $srcdir. ++# ++# So we could use something similar to $top_srcdir/$ac_aux_dir/missing, ++# iff we strip the leading $srcdir from $ac_aux_dir. That would be: ++# am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"` ++# and then we would define $MISSING as ++# MISSING="\${SHELL} $am_aux_dir/missing" ++# This will work as long as MISSING is not called from configure, because ++# unfortunately $(top_srcdir) has no meaning in configure. ++# However there are other variables, like CC, which are often used in ++# configure, and could therefore not use this "fixed" $ac_aux_dir. 
++# ++# Another solution, used here, is to always expand $ac_aux_dir to an ++# absolute PATH. The drawback is that using absolute paths prevent a ++# configured tree to be moved without reconfiguration. ++ ++AC_DEFUN([AM_AUX_DIR_EXPAND], ++[dnl Rely on autoconf to set up CDPATH properly. ++AC_PREREQ([2.50])dnl ++# expand $ac_aux_dir to an absolute path ++am_aux_dir=`cd $ac_aux_dir && pwd` ++]) ++ ++# AM_CONDITIONAL -*- Autoconf -*- ++ ++# Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005, 2006, 2008 ++# Free Software Foundation, Inc. ++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 9 ++ ++# AM_CONDITIONAL(NAME, SHELL-CONDITION) ++# ------------------------------------- ++# Define a conditional. ++AC_DEFUN([AM_CONDITIONAL], ++[AC_PREREQ(2.52)dnl ++ ifelse([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], ++ [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl ++AC_SUBST([$1_TRUE])dnl ++AC_SUBST([$1_FALSE])dnl ++_AM_SUBST_NOTMAKE([$1_TRUE])dnl ++_AM_SUBST_NOTMAKE([$1_FALSE])dnl ++m4_define([_AM_COND_VALUE_$1], [$2])dnl ++if $2; then ++ $1_TRUE= ++ $1_FALSE='#' ++else ++ $1_TRUE='#' ++ $1_FALSE= ++fi ++AC_CONFIG_COMMANDS_PRE( ++[if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then ++ AC_MSG_ERROR([[conditional "$1" was never defined. ++Usually this means the macro was only invoked conditionally.]]) ++fi])]) ++ ++# Do all the work for Automake. -*- Autoconf -*- ++ ++# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, ++# 2005, 2006, 2008, 2009 Free Software Foundation, Inc. ++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 16 ++ ++# This macro actually does too much. 
Some checks are only needed if ++# your package does certain things. But this isn't really a big deal. ++ ++# AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) ++# AM_INIT_AUTOMAKE([OPTIONS]) ++# ----------------------------------------------- ++# The call with PACKAGE and VERSION arguments is the old style ++# call (pre autoconf-2.50), which is being phased out. PACKAGE ++# and VERSION should now be passed to AC_INIT and removed from ++# the call to AM_INIT_AUTOMAKE. ++# We support both call styles for the transition. After ++# the next Automake release, Autoconf can make the AC_INIT ++# arguments mandatory, and then we can depend on a new Autoconf ++# release and drop the old call support. ++AC_DEFUN([AM_INIT_AUTOMAKE], ++[AC_PREREQ([2.62])dnl ++dnl Autoconf wants to disallow AM_ names. We explicitly allow ++dnl the ones we care about. ++m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl ++AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl ++AC_REQUIRE([AC_PROG_INSTALL])dnl ++if test "`cd $srcdir && pwd`" != "`pwd`"; then ++ # Use -I$(srcdir) only when $(srcdir) != ., so that make's output ++ # is not polluted with repeated "-I." ++ AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl ++ # test to see if srcdir already configured ++ if test -f $srcdir/config.status; then ++ AC_MSG_ERROR([source directory already configured; run "make distclean" there first]) ++ fi ++fi ++ ++# test whether we have cygpath ++if test -z "$CYGPATH_W"; then ++ if (cygpath --version) >/dev/null 2>/dev/null; then ++ CYGPATH_W='cygpath -w' ++ else ++ CYGPATH_W=echo ++ fi ++fi ++AC_SUBST([CYGPATH_W]) ++ ++# Define the identity of the package. ++dnl Distinguish between old-style and new-style calls. ++m4_ifval([$2], ++[m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl ++ AC_SUBST([PACKAGE], [$1])dnl ++ AC_SUBST([VERSION], [$2])], ++[_AM_SET_OPTIONS([$1])dnl ++dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT. 
++m4_if(m4_ifdef([AC_PACKAGE_NAME], 1)m4_ifdef([AC_PACKAGE_VERSION], 1), 11,, ++ [m4_fatal([AC_INIT should be called with package and version arguments])])dnl ++ AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl ++ AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl ++ ++_AM_IF_OPTION([no-define],, ++[AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Name of package]) ++ AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Version number of package])])dnl ++ ++# Some tools Automake needs. ++AC_REQUIRE([AM_SANITY_CHECK])dnl ++AC_REQUIRE([AC_ARG_PROGRAM])dnl ++AM_MISSING_PROG(ACLOCAL, aclocal-${am__api_version}) ++AM_MISSING_PROG(AUTOCONF, autoconf) ++AM_MISSING_PROG(AUTOMAKE, automake-${am__api_version}) ++AM_MISSING_PROG(AUTOHEADER, autoheader) ++AM_MISSING_PROG(MAKEINFO, makeinfo) ++AC_REQUIRE([AM_PROG_INSTALL_SH])dnl ++AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl ++AC_REQUIRE([AM_PROG_MKDIR_P])dnl ++# We need awk for the "check" target. The system "awk" is bad on ++# some platforms. ++AC_REQUIRE([AC_PROG_AWK])dnl ++AC_REQUIRE([AC_PROG_MAKE_SET])dnl ++AC_REQUIRE([AM_SET_LEADING_DOT])dnl ++_AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])], ++ [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], ++ [_AM_PROG_TAR([v7])])]) ++_AM_IF_OPTION([no-dependencies],, ++[AC_PROVIDE_IFELSE([AC_PROG_CC], ++ [_AM_DEPENDENCIES(CC)], ++ [define([AC_PROG_CC], ++ defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl ++AC_PROVIDE_IFELSE([AC_PROG_CXX], ++ [_AM_DEPENDENCIES(CXX)], ++ [define([AC_PROG_CXX], ++ defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl ++AC_PROVIDE_IFELSE([AC_PROG_OBJC], ++ [_AM_DEPENDENCIES(OBJC)], ++ [define([AC_PROG_OBJC], ++ defn([AC_PROG_OBJC])[_AM_DEPENDENCIES(OBJC)])])dnl ++]) ++_AM_IF_OPTION([silent-rules], [AC_REQUIRE([AM_SILENT_RULES])])dnl ++dnl The `parallel-tests' driver may need to know about EXEEXT, so add the ++dnl `am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This macro ++dnl is hooked onto _AC_COMPILER_EXEEXT early, see below. 
++AC_CONFIG_COMMANDS_PRE(dnl ++[m4_provide_if([_AM_COMPILER_EXEEXT], ++ [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl ++]) ++ ++dnl Hook into `_AC_COMPILER_EXEEXT' early to learn its expansion. Do not ++dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further ++dnl mangled by Autoconf and run in a shell conditional statement. ++m4_define([_AC_COMPILER_EXEEXT], ++m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) ++ ++ ++# When config.status generates a header, we must update the stamp-h file. ++# This file resides in the same directory as the config header ++# that is generated. The stamp files are numbered to have different names. ++ ++# Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the ++# loop where config.status creates the headers, so we can generate ++# our stamp files there. ++AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK], ++[# Compute $1's index in $config_headers. ++_am_arg=$1 ++_am_stamp_count=1 ++for _am_header in $config_headers :; do ++ case $_am_header in ++ $_am_arg | $_am_arg:* ) ++ break ;; ++ * ) ++ _am_stamp_count=`expr $_am_stamp_count + 1` ;; ++ esac ++done ++echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count]) ++ ++# Copyright (C) 2001, 2003, 2005, 2008, 2011 Free Software Foundation, ++# Inc. ++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 1 ++ ++# AM_PROG_INSTALL_SH ++# ------------------ ++# Define $install_sh. ++AC_DEFUN([AM_PROG_INSTALL_SH], ++[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl ++if test x"${install_sh}" != xset; then ++ case $am_aux_dir in ++ *\ * | *\ *) ++ install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; ++ *) ++ install_sh="\${SHELL} $am_aux_dir/install-sh" ++ esac ++fi ++AC_SUBST(install_sh)]) ++ ++# Add --enable-maintainer-mode option to configure. 
-*- Autoconf -*- ++# From Jim Meyering ++ ++# Copyright (C) 1996, 1998, 2000, 2001, 2002, 2003, 2004, 2005, 2008, ++# 2011 Free Software Foundation, Inc. ++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 5 ++ ++# AM_MAINTAINER_MODE([DEFAULT-MODE]) ++# ---------------------------------- ++# Control maintainer-specific portions of Makefiles. ++# Default is to disable them, unless `enable' is passed literally. ++# For symmetry, `disable' may be passed as well. Anyway, the user ++# can override the default with the --enable/--disable switch. ++AC_DEFUN([AM_MAINTAINER_MODE], ++[m4_case(m4_default([$1], [disable]), ++ [enable], [m4_define([am_maintainer_other], [disable])], ++ [disable], [m4_define([am_maintainer_other], [enable])], ++ [m4_define([am_maintainer_other], [enable]) ++ m4_warn([syntax], [unexpected argument to AM@&t@_MAINTAINER_MODE: $1])]) ++AC_MSG_CHECKING([whether to enable maintainer-specific portions of Makefiles]) ++ dnl maintainer-mode's default is 'disable' unless 'enable' is passed ++ AC_ARG_ENABLE([maintainer-mode], ++[ --][am_maintainer_other][-maintainer-mode am_maintainer_other make rules and dependencies not useful ++ (and sometimes confusing) to the casual installer], ++ [USE_MAINTAINER_MODE=$enableval], ++ [USE_MAINTAINER_MODE=]m4_if(am_maintainer_other, [enable], [no], [yes])) ++ AC_MSG_RESULT([$USE_MAINTAINER_MODE]) ++ AM_CONDITIONAL([MAINTAINER_MODE], [test $USE_MAINTAINER_MODE = yes]) ++ MAINT=$MAINTAINER_MODE_TRUE ++ AC_SUBST([MAINT])dnl ++] ++) ++ ++AU_DEFUN([jm_MAINTAINER_MODE], [AM_MAINTAINER_MODE]) ++ ++# Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- ++ ++# Copyright (C) 1997, 1999, 2000, 2001, 2003, 2004, 2005, 2008 ++# Free Software Foundation, Inc. 
++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 6 ++ ++# AM_MISSING_PROG(NAME, PROGRAM) ++# ------------------------------ ++AC_DEFUN([AM_MISSING_PROG], ++[AC_REQUIRE([AM_MISSING_HAS_RUN]) ++$1=${$1-"${am_missing_run}$2"} ++AC_SUBST($1)]) ++ ++ ++# AM_MISSING_HAS_RUN ++# ------------------ ++# Define MISSING if not defined so far and test if it supports --run. ++# If it does, set am_missing_run to use it, otherwise, to nothing. ++AC_DEFUN([AM_MISSING_HAS_RUN], ++[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl ++AC_REQUIRE_AUX_FILE([missing])dnl ++if test x"${MISSING+set}" != xset; then ++ case $am_aux_dir in ++ *\ * | *\ *) ++ MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; ++ *) ++ MISSING="\${SHELL} $am_aux_dir/missing" ;; ++ esac ++fi ++# Use eval to expand $SHELL ++if eval "$MISSING --run true"; then ++ am_missing_run="$MISSING --run " ++else ++ am_missing_run= ++ AC_MSG_WARN([`missing' script is too old or missing]) ++fi ++]) ++ ++# Copyright (C) 2003, 2004, 2005, 2006, 2011 Free Software Foundation, ++# Inc. ++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 1 ++ ++# AM_PROG_MKDIR_P ++# --------------- ++# Check for `mkdir -p'. ++AC_DEFUN([AM_PROG_MKDIR_P], ++[AC_PREREQ([2.60])dnl ++AC_REQUIRE([AC_PROG_MKDIR_P])dnl ++dnl Automake 1.8 to 1.9.6 used to define mkdir_p. We now use MKDIR_P, ++dnl while keeping a definition of mkdir_p for backward compatibility. ++dnl @MKDIR_P@ is magic: AC_OUTPUT adjusts its value for each Makefile. ++dnl However we cannot define mkdir_p as $(MKDIR_P) for the sake of ++dnl Makefile.ins that do not define MKDIR_P, so we do our own ++dnl adjustment using top_builddir (which is defined more often than ++dnl MKDIR_P). 
++AC_SUBST([mkdir_p], ["$MKDIR_P"])dnl ++case $mkdir_p in ++ [[\\/$]]* | ?:[[\\/]]*) ;; ++ */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; ++esac ++]) ++ ++# Helper functions for option handling. -*- Autoconf -*- ++ ++# Copyright (C) 2001, 2002, 2003, 2005, 2008, 2010 Free Software ++# Foundation, Inc. ++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 5 ++ ++# _AM_MANGLE_OPTION(NAME) ++# ----------------------- ++AC_DEFUN([_AM_MANGLE_OPTION], ++[[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) ++ ++# _AM_SET_OPTION(NAME) ++# -------------------- ++# Set option NAME. Presently that only means defining a flag for this option. ++AC_DEFUN([_AM_SET_OPTION], ++[m4_define(_AM_MANGLE_OPTION([$1]), 1)]) ++ ++# _AM_SET_OPTIONS(OPTIONS) ++# ------------------------ ++# OPTIONS is a space-separated list of Automake options. ++AC_DEFUN([_AM_SET_OPTIONS], ++[m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) ++ ++# _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) ++# ------------------------------------------- ++# Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. ++AC_DEFUN([_AM_IF_OPTION], ++[m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) ++ ++# Check to make sure that the build environment is sane. -*- Autoconf -*- ++ ++# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005, 2008 ++# Free Software Foundation, Inc. ++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 5 ++ ++# AM_SANITY_CHECK ++# --------------- ++AC_DEFUN([AM_SANITY_CHECK], ++[AC_MSG_CHECKING([whether build environment is sane]) ++# Just in case ++sleep 1 ++echo timestamp > conftest.file ++# Reject unsafe characters in $srcdir or the absolute working directory ++# name. 
Accept space and tab only in the latter. ++am_lf=' ++' ++case `pwd` in ++ *[[\\\"\#\$\&\'\`$am_lf]]*) ++ AC_MSG_ERROR([unsafe absolute working directory name]);; ++esac ++case $srcdir in ++ *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*) ++ AC_MSG_ERROR([unsafe srcdir value: `$srcdir']);; ++esac ++ ++# Do `set' in a subshell so we don't clobber the current shell's ++# arguments. Must try -L first in case configure is actually a ++# symlink; some systems play weird games with the mod time of symlinks ++# (eg FreeBSD returns the mod time of the symlink's containing ++# directory). ++if ( ++ set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` ++ if test "$[*]" = "X"; then ++ # -L didn't work. ++ set X `ls -t "$srcdir/configure" conftest.file` ++ fi ++ rm -f conftest.file ++ if test "$[*]" != "X $srcdir/configure conftest.file" \ ++ && test "$[*]" != "X conftest.file $srcdir/configure"; then ++ ++ # If neither matched, then we have a broken ls. This can happen ++ # if, for instance, CONFIG_SHELL is bash and it inherits a ++ # broken ls alias from the environment. This has actually ++ # happened. Such a system could not be considered "sane". ++ AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken ++alias in your environment]) ++ fi ++ ++ test "$[2]" = conftest.file ++ ) ++then ++ # Ok. ++ : ++else ++ AC_MSG_ERROR([newly created file is older than distributed files! ++Check your system clock]) ++fi ++AC_MSG_RESULT(yes)]) ++ ++# Copyright (C) 2001, 2003, 2005, 2011 Free Software Foundation, Inc. ++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 1 ++ ++# AM_PROG_INSTALL_STRIP ++# --------------------- ++# One issue with vendor `install' (even GNU) is that you can't ++# specify the program used to strip binaries. 
This is especially ++# annoying in cross-compiling environments, where the build's strip ++# is unlikely to handle the host's binaries. ++# Fortunately install-sh will honor a STRIPPROG variable, so we ++# always use install-sh in `make install-strip', and initialize ++# STRIPPROG with the value of the STRIP variable (set by the user). ++AC_DEFUN([AM_PROG_INSTALL_STRIP], ++[AC_REQUIRE([AM_PROG_INSTALL_SH])dnl ++# Installed binaries are usually stripped using `strip' when the user ++# run `make install-strip'. However `strip' might not be the right ++# tool to use in cross-compilation environments, therefore Automake ++# will honor the `STRIP' environment variable to overrule this program. ++dnl Don't test for $cross_compiling = yes, because it might be `maybe'. ++if test "$cross_compiling" != no; then ++ AC_CHECK_TOOL([STRIP], [strip], :) ++fi ++INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" ++AC_SUBST([INSTALL_STRIP_PROGRAM])]) ++ ++# Copyright (C) 2006, 2008, 2010 Free Software Foundation, Inc. ++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. ++ ++# serial 3 ++ ++# _AM_SUBST_NOTMAKE(VARIABLE) ++# --------------------------- ++# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. ++# This macro is traced by Automake. ++AC_DEFUN([_AM_SUBST_NOTMAKE]) ++ ++# AM_SUBST_NOTMAKE(VARIABLE) ++# -------------------------- ++# Public sister of _AM_SUBST_NOTMAKE. ++AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)]) ++ ++# Check how to create a tarball. -*- Autoconf -*- ++ ++# Copyright (C) 2004, 2005, 2012 Free Software Foundation, Inc. ++# ++# This file is free software; the Free Software Foundation ++# gives unlimited permission to copy and/or distribute it, ++# with or without modifications, as long as this notice is preserved. 
++ ++# serial 2 ++ ++# _AM_PROG_TAR(FORMAT) ++# -------------------- ++# Check how to create a tarball in format FORMAT. ++# FORMAT should be one of `v7', `ustar', or `pax'. ++# ++# Substitute a variable $(am__tar) that is a command ++# writing to stdout a FORMAT-tarball containing the directory ++# $tardir. ++# tardir=directory && $(am__tar) > result.tar ++# ++# Substitute a variable $(am__untar) that extract such ++# a tarball read from stdin. ++# $(am__untar) < result.tar ++AC_DEFUN([_AM_PROG_TAR], ++[# Always define AMTAR for backward compatibility. Yes, it's still used ++# in the wild :-( We should find a proper way to deprecate it ... ++AC_SUBST([AMTAR], ['$${TAR-tar}']) ++m4_if([$1], [v7], ++ [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], ++ [m4_case([$1], [ustar],, [pax],, ++ [m4_fatal([Unknown tar format])]) ++AC_MSG_CHECKING([how to create a $1 tar archive]) ++# Loop over all known methods to create a tar archive until one works. ++_am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' ++_am_tools=${am_cv_prog_tar_$1-$_am_tools} ++# Do not fold the above two line into one, because Tru64 sh and ++# Solaris sh will not grok spaces in the rhs of `-'. ++for _am_tool in $_am_tools ++do ++ case $_am_tool in ++ gnutar) ++ for _am_tar in tar gnutar gtar; ++ do ++ AM_RUN_LOG([$_am_tar --version]) && break ++ done ++ am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' ++ am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' ++ am__untar="$_am_tar -xf -" ++ ;; ++ plaintar) ++ # Must skip GNU tar: if it does not support --format= it doesn't create ++ # ustar tarball either. 
++ (tar --version) >/dev/null 2>&1 && continue ++ am__tar='tar chf - "$$tardir"' ++ am__tar_='tar chf - "$tardir"' ++ am__untar='tar xf -' ++ ;; ++ pax) ++ am__tar='pax -L -x $1 -w "$$tardir"' ++ am__tar_='pax -L -x $1 -w "$tardir"' ++ am__untar='pax -r' ++ ;; ++ cpio) ++ am__tar='find "$$tardir" -print | cpio -o -H $1 -L' ++ am__tar_='find "$tardir" -print | cpio -o -H $1 -L' ++ am__untar='cpio -i -H $1 -d' ++ ;; ++ none) ++ am__tar=false ++ am__tar_=false ++ am__untar=false ++ ;; ++ esac ++ ++ # If the value was cached, stop now. We just wanted to have am__tar ++ # and am__untar set. ++ test -n "${am_cv_prog_tar_$1}" && break ++ ++ # tar/untar a dummy directory, and stop if the command works ++ rm -rf conftest.dir ++ mkdir conftest.dir ++ echo GrepMe > conftest.dir/file ++ AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) ++ rm -rf conftest.dir ++ if test -s conftest.tar; then ++ AM_RUN_LOG([$am__untar /dev/null 2>&1 && break ++ fi ++done ++rm -rf conftest.dir ++ ++AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) ++AC_MSG_RESULT([$am_cv_prog_tar_$1])]) ++AC_SUBST([am__tar]) ++AC_SUBST([am__untar]) ++]) # _AM_PROG_TAR ++ ++m4_include([../config/lead-dot.m4]) ++m4_include([../config/multi.m4]) ++m4_include([../config/override.m4]) ++m4_include([../config/stdint.m4]) ++m4_include([../config/unwind_ipinfo.m4]) ++m4_include([../config/warnings.m4]) ++m4_include([../libtool.m4]) ++m4_include([../ltoptions.m4]) ++m4_include([../ltsugar.m4]) ++m4_include([../ltversion.m4]) ++m4_include([../lt~obsolete.m4]) diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/alloc.c index 000000000,000000000..3333624a2 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/alloc.c @@@ -1,0 -1,0 +1,156 @@@ ++/* alloc.c -- Memory allocation without mmap. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. 
++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++#include ++#include ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++/* Allocation routines to use on systems that do not support anonymous ++ mmap. This implementation just uses malloc, which means that the ++ backtrace functions may not be safely invoked from a signal ++ handler. */ ++ ++/* Allocate memory like malloc. If ERROR_CALLBACK is NULL, don't ++ report an error. 
*/ ++ ++void * ++backtrace_alloc (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ size_t size, backtrace_error_callback error_callback, ++ void *data) ++{ ++ void *ret; ++ ++ ret = malloc (size); ++ if (ret == NULL) ++ { ++ if (error_callback) ++ error_callback (data, "malloc", errno); ++ } ++ return ret; ++} ++ ++/* Free memory. */ ++ ++void ++backtrace_free (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ void *p, size_t size ATTRIBUTE_UNUSED, ++ backtrace_error_callback error_callback ATTRIBUTE_UNUSED, ++ void *data ATTRIBUTE_UNUSED) ++{ ++ free (p); ++} ++ ++/* Grow VEC by SIZE bytes. */ ++ ++void * ++backtrace_vector_grow (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ size_t size, backtrace_error_callback error_callback, ++ void *data, struct backtrace_vector *vec) ++{ ++ void *ret; ++ ++ if (size > vec->alc) ++ { ++ size_t alc; ++ void *base; ++ ++ if (vec->size == 0) ++ alc = 32 * size; ++ else if (vec->size >= 4096) ++ alc = vec->size + 4096; ++ else ++ alc = 2 * vec->size; ++ ++ if (alc < vec->size + size) ++ alc = vec->size + size; ++ ++ base = realloc (vec->base, alc); ++ if (base == NULL) ++ { ++ error_callback (data, "realloc", errno); ++ return NULL; ++ } ++ ++ vec->base = base; ++ vec->alc = alc - vec->size; ++ } ++ ++ ret = (char *) vec->base + vec->size; ++ vec->size += size; ++ vec->alc -= size; ++ return ret; ++} ++ ++/* Finish the current allocation on VEC. */ ++ ++void * ++backtrace_vector_finish (struct backtrace_state *state, ++ struct backtrace_vector *vec, ++ backtrace_error_callback error_callback, ++ void *data) ++{ ++ void *ret; ++ ++ /* With this allocator we call realloc in backtrace_vector_grow, ++ which means we can't easily reuse the memory here. So just ++ release it. */ ++ if (!backtrace_vector_release (state, vec, error_callback, data)) ++ return NULL; ++ ret = vec->base; ++ vec->base = NULL; ++ vec->size = 0; ++ vec->alc = 0; ++ return ret; ++} ++ ++/* Release any extra space allocated for VEC. 
*/ ++ ++int ++backtrace_vector_release (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ struct backtrace_vector *vec, ++ backtrace_error_callback error_callback, ++ void *data) ++{ ++ vec->base = realloc (vec->base, vec->size); ++ if (vec->base == NULL) ++ { ++ error_callback (data, "realloc", errno); ++ return 0; ++ } ++ vec->alc = 0; ++ return 1; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/ansidecl.h index 000000000,000000000..08aeb1eeb new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/ansidecl.h @@@ -1,0 -1,0 +1,355 @@@ ++/* ANSI and traditional C compatability macros ++ Copyright (C) 1991-2015 Free Software Foundation, Inc. ++ This file is part of the GNU C Library. ++ ++This program is free software; you can redistribute it and/or modify ++it under the terms of the GNU General Public License as published by ++the Free Software Foundation; either version 2 of the License, or ++(at your option) any later version. ++ ++This program is distributed in the hope that it will be useful, ++but WITHOUT ANY WARRANTY; without even the implied warranty of ++MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++GNU General Public License for more details. ++ ++You should have received a copy of the GNU General Public License ++along with this program; if not, write to the Free Software ++Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA 02110-1301, USA. */ ++ ++/* ANSI and traditional C compatibility macros ++ ++ ANSI C is assumed if __STDC__ is #defined. 
++ ++ Macro ANSI C definition Traditional C definition ++ ----- ---- - ---------- ----------- - ---------- ++ PTR `void *' `char *' ++ const not defined `' ++ volatile not defined `' ++ signed not defined `' ++ ++ For ease of writing code which uses GCC extensions but needs to be ++ portable to other compilers, we provide the GCC_VERSION macro that ++ simplifies testing __GNUC__ and __GNUC_MINOR__ together, and various ++ wrappers around __attribute__. Also, __extension__ will be #defined ++ to nothing if it doesn't work. See below. */ ++ ++#ifndef _ANSIDECL_H ++#define _ANSIDECL_H 1 ++ ++#ifdef __cplusplus ++extern "C" { ++#endif ++ ++/* Every source file includes this file, ++ so they will all get the switch for lint. */ ++/* LINTLIBRARY */ ++ ++/* Using MACRO(x,y) in cpp #if conditionals does not work with some ++ older preprocessors. Thus we can't define something like this: ++ ++#define HAVE_GCC_VERSION(MAJOR, MINOR) \ ++ (__GNUC__ > (MAJOR) || (__GNUC__ == (MAJOR) && __GNUC_MINOR__ >= (MINOR))) ++ ++and then test "#if HAVE_GCC_VERSION(2,7)". ++ ++So instead we use the macro below and test it against specific values. */ ++ ++/* This macro simplifies testing whether we are using gcc, and if it ++ is of a particular minimum version. (Both major & minor numbers are ++ significant.) This macro will evaluate to 0 if we are not using ++ gcc at all. */ ++#ifndef GCC_VERSION ++#define GCC_VERSION (__GNUC__ * 1000 + __GNUC_MINOR__) ++#endif /* GCC_VERSION */ ++ ++#if defined (__STDC__) || defined(__cplusplus) || defined (_AIX) || (defined (__mips) && defined (_SYSTYPE_SVR4)) || defined(_WIN32) ++/* All known AIX compilers implement these things (but don't always ++ define __STDC__). The RISC/OS MIPS compiler defines these things ++ in SVR4 mode, but does not define __STDC__. */ ++/* eraxxon@alumni.rice.edu: The Compaq C++ compiler, unlike many other ++ C++ compilers, does not define __STDC__, though it acts as if this ++ was so. 
(Verified versions: 5.7, 6.2, 6.3, 6.5) */ ++ ++#define PTR void * ++ ++#undef const ++#undef volatile ++#undef signed ++ ++/* inline requires special treatment; it's in C99, and GCC >=2.7 supports ++ it too, but it's not in C89. */ ++#undef inline ++#if __STDC_VERSION__ >= 199901L || defined(__cplusplus) || (defined(__SUNPRO_C) && defined(__C99FEATURES__)) ++/* it's a keyword */ ++#else ++# if GCC_VERSION >= 2007 ++# define inline __inline__ /* __inline__ prevents -pedantic warnings */ ++# else ++# define inline /* nothing */ ++# endif ++#endif ++ ++#else /* Not ANSI C. */ ++ ++#define PTR char * ++ ++/* some systems define these in header files for non-ansi mode */ ++#undef const ++#undef volatile ++#undef signed ++#undef inline ++#define const ++#define volatile ++#define signed ++#define inline ++ ++#endif /* ANSI C. */ ++ ++/* Define macros for some gcc attributes. This permits us to use the ++ macros freely, and know that they will come into play for the ++ version of gcc in which they are supported. */ ++ ++#if (GCC_VERSION < 2007) ++# define __attribute__(x) ++#endif ++ ++/* Attribute __malloc__ on functions was valid as of gcc 2.96. */ ++#ifndef ATTRIBUTE_MALLOC ++# if (GCC_VERSION >= 2096) ++# define ATTRIBUTE_MALLOC __attribute__ ((__malloc__)) ++# else ++# define ATTRIBUTE_MALLOC ++# endif /* GNUC >= 2.96 */ ++#endif /* ATTRIBUTE_MALLOC */ ++ ++/* Attributes on labels were valid as of gcc 2.93 and g++ 4.5. For ++ g++ an attribute on a label must be followed by a semicolon. */ ++#ifndef ATTRIBUTE_UNUSED_LABEL ++# ifndef __cplusplus ++# if GCC_VERSION >= 2093 ++# define ATTRIBUTE_UNUSED_LABEL ATTRIBUTE_UNUSED ++# else ++# define ATTRIBUTE_UNUSED_LABEL ++# endif ++# else ++# if GCC_VERSION >= 4005 ++# define ATTRIBUTE_UNUSED_LABEL ATTRIBUTE_UNUSED ; ++# else ++# define ATTRIBUTE_UNUSED_LABEL ++# endif ++# endif ++#endif ++ ++/* Similarly to ARG_UNUSED below. 
Prior to GCC 3.4, the C++ frontend ++ couldn't parse attributes placed after the identifier name, and now ++ the entire compiler is built with C++. */ ++#ifndef ATTRIBUTE_UNUSED ++#if GCC_VERSION >= 3004 ++# define ATTRIBUTE_UNUSED __attribute__ ((__unused__)) ++#else ++#define ATTRIBUTE_UNUSED ++#endif ++#endif /* ATTRIBUTE_UNUSED */ ++ ++/* Before GCC 3.4, the C++ frontend couldn't parse attributes placed after the ++ identifier name. */ ++#if ! defined(__cplusplus) || (GCC_VERSION >= 3004) ++# define ARG_UNUSED(NAME) NAME ATTRIBUTE_UNUSED ++#else /* !__cplusplus || GNUC >= 3.4 */ ++# define ARG_UNUSED(NAME) NAME ++#endif /* !__cplusplus || GNUC >= 3.4 */ ++ ++#ifndef ATTRIBUTE_NORETURN ++#define ATTRIBUTE_NORETURN __attribute__ ((__noreturn__)) ++#endif /* ATTRIBUTE_NORETURN */ ++ ++/* Attribute `nonnull' was valid as of gcc 3.3. */ ++#ifndef ATTRIBUTE_NONNULL ++# if (GCC_VERSION >= 3003) ++# define ATTRIBUTE_NONNULL(m) __attribute__ ((__nonnull__ (m))) ++# else ++# define ATTRIBUTE_NONNULL(m) ++# endif /* GNUC >= 3.3 */ ++#endif /* ATTRIBUTE_NONNULL */ ++ ++/* Attribute `returns_nonnull' was valid as of gcc 4.9. */ ++#ifndef ATTRIBUTE_RETURNS_NONNULL ++# if (GCC_VERSION >= 4009) ++# define ATTRIBUTE_RETURNS_NONNULL __attribute__ ((__returns_nonnull__)) ++# else ++# define ATTRIBUTE_RETURNS_NONNULL ++# endif /* GNUC >= 4.9 */ ++#endif /* ATTRIBUTE_RETURNS_NONNULL */ ++ ++/* Attribute `pure' was valid as of gcc 3.0. */ ++#ifndef ATTRIBUTE_PURE ++# if (GCC_VERSION >= 3000) ++# define ATTRIBUTE_PURE __attribute__ ((__pure__)) ++# else ++# define ATTRIBUTE_PURE ++# endif /* GNUC >= 3.0 */ ++#endif /* ATTRIBUTE_PURE */ ++ ++/* Use ATTRIBUTE_PRINTF when the format specifier must not be NULL. ++ This was the case for the `printf' format attribute by itself ++ before GCC 3.3, but as of 3.3 we need to add the `nonnull' ++ attribute to retain this behavior. 
*/ ++#ifndef ATTRIBUTE_PRINTF ++#define ATTRIBUTE_PRINTF(m, n) __attribute__ ((__format__ (__printf__, m, n))) ATTRIBUTE_NONNULL(m) ++#define ATTRIBUTE_PRINTF_1 ATTRIBUTE_PRINTF(1, 2) ++#define ATTRIBUTE_PRINTF_2 ATTRIBUTE_PRINTF(2, 3) ++#define ATTRIBUTE_PRINTF_3 ATTRIBUTE_PRINTF(3, 4) ++#define ATTRIBUTE_PRINTF_4 ATTRIBUTE_PRINTF(4, 5) ++#define ATTRIBUTE_PRINTF_5 ATTRIBUTE_PRINTF(5, 6) ++#endif /* ATTRIBUTE_PRINTF */ ++ ++/* Use ATTRIBUTE_FPTR_PRINTF when the format attribute is to be set on ++ a function pointer. Format attributes were allowed on function ++ pointers as of gcc 3.1. */ ++#ifndef ATTRIBUTE_FPTR_PRINTF ++# if (GCC_VERSION >= 3001) ++# define ATTRIBUTE_FPTR_PRINTF(m, n) ATTRIBUTE_PRINTF(m, n) ++# else ++# define ATTRIBUTE_FPTR_PRINTF(m, n) ++# endif /* GNUC >= 3.1 */ ++# define ATTRIBUTE_FPTR_PRINTF_1 ATTRIBUTE_FPTR_PRINTF(1, 2) ++# define ATTRIBUTE_FPTR_PRINTF_2 ATTRIBUTE_FPTR_PRINTF(2, 3) ++# define ATTRIBUTE_FPTR_PRINTF_3 ATTRIBUTE_FPTR_PRINTF(3, 4) ++# define ATTRIBUTE_FPTR_PRINTF_4 ATTRIBUTE_FPTR_PRINTF(4, 5) ++# define ATTRIBUTE_FPTR_PRINTF_5 ATTRIBUTE_FPTR_PRINTF(5, 6) ++#endif /* ATTRIBUTE_FPTR_PRINTF */ ++ ++/* Use ATTRIBUTE_NULL_PRINTF when the format specifier may be NULL. A ++ NULL format specifier was allowed as of gcc 3.3. */ ++#ifndef ATTRIBUTE_NULL_PRINTF ++# if (GCC_VERSION >= 3003) ++# define ATTRIBUTE_NULL_PRINTF(m, n) __attribute__ ((__format__ (__printf__, m, n))) ++# else ++# define ATTRIBUTE_NULL_PRINTF(m, n) ++# endif /* GNUC >= 3.3 */ ++# define ATTRIBUTE_NULL_PRINTF_1 ATTRIBUTE_NULL_PRINTF(1, 2) ++# define ATTRIBUTE_NULL_PRINTF_2 ATTRIBUTE_NULL_PRINTF(2, 3) ++# define ATTRIBUTE_NULL_PRINTF_3 ATTRIBUTE_NULL_PRINTF(3, 4) ++# define ATTRIBUTE_NULL_PRINTF_4 ATTRIBUTE_NULL_PRINTF(4, 5) ++# define ATTRIBUTE_NULL_PRINTF_5 ATTRIBUTE_NULL_PRINTF(5, 6) ++#endif /* ATTRIBUTE_NULL_PRINTF */ ++ ++/* Attribute `sentinel' was valid as of gcc 3.5. 
*/ ++#ifndef ATTRIBUTE_SENTINEL ++# if (GCC_VERSION >= 3005) ++# define ATTRIBUTE_SENTINEL __attribute__ ((__sentinel__)) ++# else ++# define ATTRIBUTE_SENTINEL ++# endif /* GNUC >= 3.5 */ ++#endif /* ATTRIBUTE_SENTINEL */ ++ ++ ++#ifndef ATTRIBUTE_ALIGNED_ALIGNOF ++# if (GCC_VERSION >= 3000) ++# define ATTRIBUTE_ALIGNED_ALIGNOF(m) __attribute__ ((__aligned__ (__alignof__ (m)))) ++# else ++# define ATTRIBUTE_ALIGNED_ALIGNOF(m) ++# endif /* GNUC >= 3.0 */ ++#endif /* ATTRIBUTE_ALIGNED_ALIGNOF */ ++ ++/* Useful for structures whose layout must much some binary specification ++ regardless of the alignment and padding qualities of the compiler. */ ++#ifndef ATTRIBUTE_PACKED ++# define ATTRIBUTE_PACKED __attribute__ ((packed)) ++#endif ++ ++/* Attribute `hot' and `cold' was valid as of gcc 4.3. */ ++#ifndef ATTRIBUTE_COLD ++# if (GCC_VERSION >= 4003) ++# define ATTRIBUTE_COLD __attribute__ ((__cold__)) ++# else ++# define ATTRIBUTE_COLD ++# endif /* GNUC >= 4.3 */ ++#endif /* ATTRIBUTE_COLD */ ++#ifndef ATTRIBUTE_HOT ++# if (GCC_VERSION >= 4003) ++# define ATTRIBUTE_HOT __attribute__ ((__hot__)) ++# else ++# define ATTRIBUTE_HOT ++# endif /* GNUC >= 4.3 */ ++#endif /* ATTRIBUTE_HOT */ ++ ++/* Attribute 'no_sanitize_undefined' was valid as of gcc 4.9. */ ++#ifndef ATTRIBUTE_NO_SANITIZE_UNDEFINED ++# if (GCC_VERSION >= 4009) ++# define ATTRIBUTE_NO_SANITIZE_UNDEFINED __attribute__ ((no_sanitize_undefined)) ++# else ++# define ATTRIBUTE_NO_SANITIZE_UNDEFINED ++# endif /* GNUC >= 4.9 */ ++#endif /* ATTRIBUTE_NO_SANITIZE_UNDEFINED */ ++ ++/* We use __extension__ in some places to suppress -pedantic warnings ++ about GCC extensions. This feature didn't work properly before ++ gcc 2.8. */ ++#if GCC_VERSION < 2008 ++#define __extension__ ++#endif ++ ++/* This is used to declare a const variable which should be visible ++ outside of the current compilation unit. Use it as ++ EXPORTED_CONST int i = 1; ++ This is because the semantics of const are different in C and C++. 
++ "extern const" is permitted in C but it looks strange, and gcc ++ warns about it when -Wc++-compat is not used. */ ++#ifdef __cplusplus ++#define EXPORTED_CONST extern const ++#else ++#define EXPORTED_CONST const ++#endif ++ ++/* Be conservative and only use enum bitfields with C++ or GCC. ++ FIXME: provide a complete autoconf test for buggy enum bitfields. */ ++ ++#ifdef __cplusplus ++#define ENUM_BITFIELD(TYPE) enum TYPE ++#elif (GCC_VERSION > 2000) ++#define ENUM_BITFIELD(TYPE) __extension__ enum TYPE ++#else ++#define ENUM_BITFIELD(TYPE) unsigned int ++#endif ++ ++/* C++11 adds the ability to add "override" after an implementation of a ++ virtual function in a subclass, to: ++ (A) document that this is an override of a virtual function ++ (B) allow the compiler to issue a warning if it isn't (e.g. a mismatch ++ of the type signature). ++ ++ Similarly, it allows us to add a "final" to indicate that no subclass ++ may subsequently override the vfunc. ++ ++ Provide OVERRIDE and FINAL as macros, allowing us to get these benefits ++ when compiling with C++11 support, but without requiring C++11. ++ ++ For gcc, use "-std=c++11" to enable C++11 support; gcc 6 onwards enables ++ this by default (actually GNU++14). */ ++ ++#if __cplusplus >= 201103 ++/* C++11 claims to be available: use it. final/override were only ++ implemented in 4.7, though. */ ++# if GCC_VERSION < 4007 ++# define OVERRIDE ++# define FINAL ++# else ++# define OVERRIDE override ++# define FINAL final ++# endif ++#elif GCC_VERSION >= 4007 ++/* G++ 4.7 supports __final in C++98. 
*/ ++# define OVERRIDE ++# define FINAL __final ++#else ++/* No C++11 support; leave the macros empty: */ ++# define OVERRIDE ++# define FINAL ++#endif ++ ++#ifdef __cplusplus ++} ++#endif ++ ++#endif /* ansidecl.h */ diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/atomic.c index 000000000,000000000..4f31ff36f new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/atomic.c @@@ -1,0 -1,0 +1,113 @@@ ++/* atomic.c -- Support for atomic functions if not present. ++ Copyright (C) 2013-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. 
*/ ++ ++#include "config.h" ++ ++#include ++ ++#include "backtrace.h" ++#include "backtrace-supported.h" ++#include "internal.h" ++ ++/* This file holds implementations of the atomic functions that are ++ used if the host compiler has the sync functions but not the atomic ++ functions, as is true of versions of GCC before 4.7. */ ++ ++#if !defined (HAVE_ATOMIC_FUNCTIONS) && defined (HAVE_SYNC_FUNCTIONS) ++ ++/* Do an atomic load of a pointer. */ ++ ++void * ++backtrace_atomic_load_pointer (void *arg) ++{ ++ void **pp; ++ void *p; ++ ++ pp = (void **) arg; ++ p = *pp; ++ while (!__sync_bool_compare_and_swap (pp, p, p)) ++ p = *pp; ++ return p; ++} ++ ++/* Do an atomic load of an int. */ ++ ++int ++backtrace_atomic_load_int (int *p) ++{ ++ int i; ++ ++ i = *p; ++ while (!__sync_bool_compare_and_swap (p, i, i)) ++ i = *p; ++ return i; ++} ++ ++/* Do an atomic store of a pointer. */ ++ ++void ++backtrace_atomic_store_pointer (void *arg, void *p) ++{ ++ void **pp; ++ void *old; ++ ++ pp = (void **) arg; ++ old = *pp; ++ while (!__sync_bool_compare_and_swap (pp, old, p)) ++ old = *pp; ++} ++ ++/* Do an atomic store of a size_t value. */ ++ ++void ++backtrace_atomic_store_size_t (size_t *p, size_t v) ++{ ++ size_t old; ++ ++ old = *p; ++ while (!__sync_bool_compare_and_swap (p, old, v)) ++ old = *p; ++} ++ ++/* Do an atomic store of a int value. */ ++ ++void ++backtrace_atomic_store_int (int *p, int v) ++{ ++ size_t old; ++ ++ old = *p; ++ while (!__sync_bool_compare_and_swap (p, old, v)) ++ old = *p; ++} ++ ++#endif diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/backtrace-supported.h.in index 000000000,000000000..c2d03d241 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/backtrace-supported.h.in @@@ -1,0 -1,0 +1,66 @@@ ++/* backtrace-supported.h.in -- Whether stack backtrace is supported. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. 
++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++/* The file backtrace-supported.h.in is used by configure to generate ++ the file backtrace-supported.h. The file backtrace-supported.h may ++ be #include'd to see whether the backtrace library will be able to ++ get a backtrace and produce symbolic information. */ ++ ++ ++/* BACKTRACE_SUPPORTED will be #define'd as 1 if the backtrace library ++ should work, 0 if it will not. Libraries may #include this to make ++ other arrangements. 
*/ ++ ++#define BACKTRACE_SUPPORTED @BACKTRACE_SUPPORTED@ ++ ++/* BACKTRACE_USES_MALLOC will be #define'd as 1 if the backtrace ++ library will call malloc as it works, 0 if it will call mmap ++ instead. This may be used to determine whether it is safe to call ++ the backtrace functions from a signal handler. In general this ++ only applies to calls like backtrace and backtrace_pcinfo. It does ++ not apply to backtrace_simple, which never calls malloc. It does ++ not apply to backtrace_print, which always calls fprintf and ++ therefore malloc. */ ++ ++#define BACKTRACE_USES_MALLOC @BACKTRACE_USES_MALLOC@ ++ ++/* BACKTRACE_SUPPORTS_THREADS will be #define'd as 1 if the backtrace ++ library is configured with threading support, 0 if not. If this is ++ 0, the threaded parameter to backtrace_create_state must be passed ++ as 0. */ ++ ++#define BACKTRACE_SUPPORTS_THREADS @BACKTRACE_SUPPORTS_THREADS@ ++ ++/* BACKTRACE_SUPPORTS_DATA will be #defined'd as 1 if the backtrace_syminfo ++ will work for variables. It will always work for functions. */ ++ ++#define BACKTRACE_SUPPORTS_DATA @BACKTRACE_SUPPORTS_DATA@ diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/backtrace.c index 000000000,000000000..7372a27f1 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/backtrace.c @@@ -1,0 -1,0 +1,129 @@@ ++/* backtrace.c -- Entry point for stack backtrace library. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. 
++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++ ++#include "unwind.h" ++#include "backtrace.h" ++#include "internal.h" ++ ++/* The main backtrace_full routine. */ ++ ++/* Data passed through _Unwind_Backtrace. */ ++ ++struct backtrace_data ++{ ++ /* Number of frames to skip. */ ++ int skip; ++ /* Library state. */ ++ struct backtrace_state *state; ++ /* Callback routine. */ ++ backtrace_full_callback callback; ++ /* Error callback routine. */ ++ backtrace_error_callback error_callback; ++ /* Data to pass to callback routines. */ ++ void *data; ++ /* Value to return from backtrace_full. */ ++ int ret; ++ /* Whether there is any memory available. */ ++ int can_alloc; ++}; ++ ++/* Unwind library callback routine. This is passed to ++ _Unwind_Backtrace. 
*/ ++ ++static _Unwind_Reason_Code ++unwind (struct _Unwind_Context *context, void *vdata) ++{ ++ struct backtrace_data *bdata = (struct backtrace_data *) vdata; ++ uintptr_t pc; ++ int ip_before_insn = 0; ++ ++#ifdef HAVE_GETIPINFO ++ pc = _Unwind_GetIPInfo (context, &ip_before_insn); ++#else ++ pc = _Unwind_GetIP (context); ++#endif ++ ++ if (bdata->skip > 0) ++ { ++ --bdata->skip; ++ return _URC_NO_REASON; ++ } ++ ++ if (!ip_before_insn) ++ --pc; ++ ++ if (!bdata->can_alloc) ++ bdata->ret = bdata->callback (bdata->data, pc, NULL, 0, NULL); ++ else ++ bdata->ret = backtrace_pcinfo (bdata->state, pc, bdata->callback, ++ bdata->error_callback, bdata->data); ++ if (bdata->ret != 0) ++ return _URC_END_OF_STACK; ++ ++ return _URC_NO_REASON; ++} ++ ++/* Get a stack backtrace. */ ++ ++int ++backtrace_full (struct backtrace_state *state, int skip, ++ backtrace_full_callback callback, ++ backtrace_error_callback error_callback, void *data) ++{ ++ struct backtrace_data bdata; ++ void *p; ++ ++ bdata.skip = skip + 1; ++ bdata.state = state; ++ bdata.callback = callback; ++ bdata.error_callback = error_callback; ++ bdata.data = data; ++ bdata.ret = 0; ++ ++ /* If we can't allocate any memory at all, don't try to produce ++ file/line information. */ ++ p = backtrace_alloc (state, 4096, NULL, NULL); ++ if (p == NULL) ++ bdata.can_alloc = 0; ++ else ++ { ++ backtrace_free (state, p, 4096, NULL, NULL); ++ bdata.can_alloc = 1; ++ } ++ ++ _Unwind_Backtrace (unwind, &bdata); ++ return bdata.ret; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/backtrace.h index 000000000,000000000..0e6e29f39 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/backtrace.h @@@ -1,0 -1,0 +1,199 @@@ ++/* backtrace.h -- Public header file for stack backtrace library. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. 
++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#ifndef BACKTRACE_H ++#define BACKTRACE_H ++ ++#include ++#include ++ ++/* We want to get a definition for uintptr_t, but we still care about ++ systems that don't have . */ ++#if defined(__GLIBC__) && __GLIBC__ >= 2 ++ ++#include ++ ++#elif defined(HAVE_STDINT_H) ++ ++#include ++ ++#else ++ ++/* Systems that don't have must provide gstdint.h, e.g., ++ from GCC_HEADER_STDINT in configure.ac. */ ++#include "gstdint.h" ++ ++#endif ++ ++#ifdef __cplusplus ++extern "C" { ++#endif ++ ++/* The backtrace state. This struct is intentionally not defined in ++ the public interface. 
*/ ++ ++struct backtrace_state; ++ ++/* The type of the error callback argument to backtrace functions. ++ This function, if not NULL, will be called for certain error cases. ++ The DATA argument is passed to the function that calls this one. ++ The MSG argument is an error message. The ERRNUM argument, if ++ greater than 0, holds an errno value. The MSG buffer may become ++ invalid after this function returns. ++ ++ As a special case, the ERRNUM argument will be passed as -1 if no ++ debug info can be found for the executable, but the function ++ requires debug info (e.g., backtrace_full, backtrace_pcinfo). The ++ MSG in this case will be something along the lines of "no debug ++ info". Similarly, ERRNUM will be passed as -1 if there is no ++ symbol table, but the function requires a symbol table (e.g., ++ backtrace_syminfo). This may be used as a signal that some other ++ approach should be tried. */ ++ ++typedef void (*backtrace_error_callback) (void *data, const char *msg, ++ int errnum); ++ ++/* Create state information for the backtrace routines. This must be ++ called before any of the other routines, and its return value must ++ be passed to all of the other routines. FILENAME is the path name ++ of the executable file; if it is NULL the library will try ++ system-specific path names. If not NULL, FILENAME must point to a ++ permanent buffer. If THREADED is non-zero the state may be ++ accessed by multiple threads simultaneously, and the library will ++ use appropriate atomic operations. If THREADED is zero the state ++ may only be accessed by one thread at a time. This returns a state ++ pointer on success, NULL on error. If an error occurs, this will ++ call the ERROR_CALLBACK routine. */ ++ ++extern struct backtrace_state *backtrace_create_state ( ++ const char *filename, int threaded, ++ backtrace_error_callback error_callback, void *data); ++ ++/* The type of the callback argument to the backtrace_full function. 
++ DATA is the argument passed to backtrace_full. PC is the program ++ counter. FILENAME is the name of the file containing PC, or NULL ++ if not available. LINENO is the line number in FILENAME containing ++ PC, or 0 if not available. FUNCTION is the name of the function ++ containing PC, or NULL if not available. This should return 0 to ++ continuing tracing. The FILENAME and FUNCTION buffers may become ++ invalid after this function returns. */ ++ ++typedef int (*backtrace_full_callback) (void *data, uintptr_t pc, ++ const char *filename, int lineno, ++ const char *function); ++ ++/* Get a full stack backtrace. SKIP is the number of frames to skip; ++ passing 0 will start the trace with the function calling ++ backtrace_full. DATA is passed to the callback routine. If any ++ call to CALLBACK returns a non-zero value, the stack backtrace ++ stops, and backtrace returns that value; this may be used to limit ++ the number of stack frames desired. If all calls to CALLBACK ++ return 0, backtrace returns 0. The backtrace_full function will ++ make at least one call to either CALLBACK or ERROR_CALLBACK. This ++ function requires debug info for the executable. */ ++ ++extern int backtrace_full (struct backtrace_state *state, int skip, ++ backtrace_full_callback callback, ++ backtrace_error_callback error_callback, ++ void *data); ++ ++/* The type of the callback argument to the backtrace_simple function. ++ DATA is the argument passed to simple_backtrace. PC is the program ++ counter. This should return 0 to continue tracing. */ ++ ++typedef int (*backtrace_simple_callback) (void *data, uintptr_t pc); ++ ++/* Get a simple backtrace. SKIP is the number of frames to skip, as ++ in backtrace. DATA is passed to the callback routine. If any call ++ to CALLBACK returns a non-zero value, the stack backtrace stops, ++ and backtrace_simple returns that value. Otherwise ++ backtrace_simple returns 0. 
The backtrace_simple function will ++ make at least one call to either CALLBACK or ERROR_CALLBACK. This ++ function does not require any debug info for the executable. */ ++ ++extern int backtrace_simple (struct backtrace_state *state, int skip, ++ backtrace_simple_callback callback, ++ backtrace_error_callback error_callback, ++ void *data); ++ ++/* Print the current backtrace in a user readable format to a FILE. ++ SKIP is the number of frames to skip, as in backtrace_full. Any ++ error messages are printed to stderr. This function requires debug ++ info for the executable. */ ++ ++extern void backtrace_print (struct backtrace_state *state, int skip, FILE *); ++ ++/* Given PC, a program counter in the current program, call the ++ callback function with filename, line number, and function name ++ information. This will normally call the callback function exactly ++ once. However, if the PC happens to describe an inlined call, and ++ the debugging information contains the necessary information, then ++ this may call the callback function multiple times. This will make ++ at least one call to either CALLBACK or ERROR_CALLBACK. This ++ returns the first non-zero value returned by CALLBACK, or 0. */ ++ ++extern int backtrace_pcinfo (struct backtrace_state *state, uintptr_t pc, ++ backtrace_full_callback callback, ++ backtrace_error_callback error_callback, ++ void *data); ++ ++/* The type of the callback argument to backtrace_syminfo. DATA and ++ PC are the arguments passed to backtrace_syminfo. SYMNAME is the ++ name of the symbol for the corresponding code. SYMVAL is the ++ value and SYMSIZE is the size of the symbol. SYMNAME will be NULL ++ if no error occurred but the symbol could not be found. 
*/ ++ ++typedef void (*backtrace_syminfo_callback) (void *data, uintptr_t pc, ++ const char *symname, ++ uintptr_t symval, ++ uintptr_t symsize); ++ ++/* Given ADDR, an address or program counter in the current program, ++ call the callback information with the symbol name and value ++ describing the function or variable in which ADDR may be found. ++ This will call either CALLBACK or ERROR_CALLBACK exactly once. ++ This returns 1 on success, 0 on failure. This function requires ++ the symbol table but does not require the debug info. Note that if ++ the symbol table is present but ADDR could not be found in the ++ table, CALLBACK will be called with a NULL SYMNAME argument. ++ Returns 1 on success, 0 on error. */ ++ ++extern int backtrace_syminfo (struct backtrace_state *state, uintptr_t addr, ++ backtrace_syminfo_callback callback, ++ backtrace_error_callback error_callback, ++ void *data); ++ ++#ifdef __cplusplus ++} /* End extern "C". */ ++#endif ++ ++#endif diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/btest.c index 000000000,000000000..8c69b1b87 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/btest.c @@@ -1,0 -1,0 +1,721 @@@ ++/* btest.c -- Test for libbacktrace library ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. 
++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++/* This program tests the externally visible interfaces of the ++ libbacktrace library. */ ++ ++#include ++#include ++#include ++#include ++ ++#include "filenames.h" ++ ++#include "backtrace.h" ++#include "backtrace-supported.h" ++ ++/* Portable attribute syntax. Actually some of these tests probably ++ won't work if the attributes are not recognized. */ ++ ++#ifndef GCC_VERSION ++# define GCC_VERSION (__GNUC__ * 1000 + __GNUC_MINOR__) ++#endif ++ ++#if (GCC_VERSION < 2007) ++# define __attribute__(x) ++#endif ++ ++#ifndef ATTRIBUTE_UNUSED ++# define ATTRIBUTE_UNUSED __attribute__ ((__unused__)) ++#endif ++ ++/* Used to collect backtrace info. */ ++ ++struct info ++{ ++ char *filename; ++ int lineno; ++ char *function; ++}; ++ ++/* Passed to backtrace callback function. */ ++ ++struct bdata ++{ ++ struct info *all; ++ size_t index; ++ size_t max; ++ int failed; ++}; ++ ++/* Passed to backtrace_simple callback function. */ ++ ++struct sdata ++{ ++ uintptr_t *addrs; ++ size_t index; ++ size_t max; ++ int failed; ++}; ++ ++/* Passed to backtrace_syminfo callback function. 
*/ ++ ++struct symdata ++{ ++ const char *name; ++ uintptr_t val, size; ++ int failed; ++}; ++ ++/* The backtrace state. */ ++ ++static void *state; ++ ++/* The number of failures. */ ++ ++static int failures; ++ ++/* Return the base name in a path. */ ++ ++static const char * ++base (const char *p) ++{ ++ const char *last; ++ const char *s; ++ ++ last = NULL; ++ for (s = p; *s != '\0'; ++s) ++ { ++ if (IS_DIR_SEPARATOR (*s)) ++ last = s + 1; ++ } ++ return last != NULL ? last : p; ++} ++ ++/* Check an entry in a struct info array. */ ++ ++static void ++check (const char *name, int index, const struct info *all, int want_lineno, ++ const char *want_function, int *failed) ++{ ++ if (*failed) ++ return; ++ if (all[index].filename == NULL || all[index].function == NULL) ++ { ++ fprintf (stderr, "%s: [%d]: missing file name or function name\n", ++ name, index); ++ *failed = 1; ++ return; ++ } ++ if (strcmp (base (all[index].filename), "btest.c") != 0) ++ { ++ fprintf (stderr, "%s: [%d]: got %s expected test.c\n", name, index, ++ all[index].filename); ++ *failed = 1; ++ } ++ if (all[index].lineno != want_lineno) ++ { ++ fprintf (stderr, "%s: [%d]: got %d expected %d\n", name, index, ++ all[index].lineno, want_lineno); ++ *failed = 1; ++ } ++ if (strcmp (all[index].function, want_function) != 0) ++ { ++ fprintf (stderr, "%s: [%d]: got %s expected %s\n", name, index, ++ all[index].function, want_function); ++ *failed = 1; ++ } ++} ++ ++/* The backtrace callback function. 
*/ ++ ++static int ++callback_one (void *vdata, uintptr_t pc ATTRIBUTE_UNUSED, ++ const char *filename, int lineno, const char *function) ++{ ++ struct bdata *data = (struct bdata *) vdata; ++ struct info *p; ++ ++ if (data->index >= data->max) ++ { ++ fprintf (stderr, "callback_one: callback called too many times\n"); ++ data->failed = 1; ++ return 1; ++ } ++ ++ p = &data->all[data->index]; ++ if (filename == NULL) ++ p->filename = NULL; ++ else ++ { ++ p->filename = strdup (filename); ++ assert (p->filename != NULL); ++ } ++ p->lineno = lineno; ++ if (function == NULL) ++ p->function = NULL; ++ else ++ { ++ p->function = strdup (function); ++ assert (p->function != NULL); ++ } ++ ++data->index; ++ ++ return 0; ++} ++ ++/* An error callback passed to backtrace. */ ++ ++static void ++error_callback_one (void *vdata, const char *msg, int errnum) ++{ ++ struct bdata *data = (struct bdata *) vdata; ++ ++ fprintf (stderr, "%s", msg); ++ if (errnum > 0) ++ fprintf (stderr, ": %s", strerror (errnum)); ++ fprintf (stderr, "\n"); ++ data->failed = 1; ++} ++ ++/* The backtrace_simple callback function. */ ++ ++static int ++callback_two (void *vdata, uintptr_t pc) ++{ ++ struct sdata *data = (struct sdata *) vdata; ++ ++ if (data->index >= data->max) ++ { ++ fprintf (stderr, "callback_two: callback called too many times\n"); ++ data->failed = 1; ++ return 1; ++ } ++ ++ data->addrs[data->index] = pc; ++ ++data->index; ++ ++ return 0; ++} ++ ++/* An error callback passed to backtrace_simple. */ ++ ++static void ++error_callback_two (void *vdata, const char *msg, int errnum) ++{ ++ struct sdata *data = (struct sdata *) vdata; ++ ++ fprintf (stderr, "%s", msg); ++ if (errnum > 0) ++ fprintf (stderr, ": %s", strerror (errnum)); ++ fprintf (stderr, "\n"); ++ data->failed = 1; ++} ++ ++/* The backtrace_syminfo callback function. 
*/ ++ ++static void ++callback_three (void *vdata, uintptr_t pc ATTRIBUTE_UNUSED, ++ const char *symname, uintptr_t symval, ++ uintptr_t symsize) ++{ ++ struct symdata *data = (struct symdata *) vdata; ++ ++ if (symname == NULL) ++ data->name = NULL; ++ else ++ { ++ data->name = strdup (symname); ++ assert (data->name != NULL); ++ } ++ data->val = symval; ++ data->size = symsize; ++} ++ ++/* The backtrace_syminfo error callback function. */ ++ ++static void ++error_callback_three (void *vdata, const char *msg, int errnum) ++{ ++ struct symdata *data = (struct symdata *) vdata; ++ ++ fprintf (stderr, "%s", msg); ++ if (errnum > 0) ++ fprintf (stderr, ": %s", strerror (errnum)); ++ fprintf (stderr, "\n"); ++ data->failed = 1; ++} ++ ++/* Test the backtrace function with non-inlined functions. */ ++ ++static int test1 (void) __attribute__ ((noinline, unused)); ++static int f2 (int) __attribute__ ((noinline)); ++static int f3 (int, int) __attribute__ ((noinline)); ++ ++static int ++test1 (void) ++{ ++ /* Returning a value here and elsewhere avoids a tailcall which ++ would mess up the backtrace. 
*/ ++ return f2 (__LINE__) + 1; ++} ++ ++static int ++f2 (int f1line) ++{ ++ return f3 (f1line, __LINE__) + 2; ++} ++ ++static int ++f3 (int f1line, int f2line) ++{ ++ struct info all[20]; ++ struct bdata data; ++ int f3line; ++ int i; ++ ++ data.all = &all[0]; ++ data.index = 0; ++ data.max = 20; ++ data.failed = 0; ++ ++ f3line = __LINE__ + 1; ++ i = backtrace_full (state, 0, callback_one, error_callback_one, &data); ++ ++ if (i != 0) ++ { ++ fprintf (stderr, "test1: unexpected return value %d\n", i); ++ data.failed = 1; ++ } ++ ++ if (data.index < 3) ++ { ++ fprintf (stderr, ++ "test1: not enough frames; got %zu, expected at least 3\n", ++ data.index); ++ data.failed = 1; ++ } ++ ++ check ("test1", 0, all, f3line, "f3", &data.failed); ++ check ("test1", 1, all, f2line, "f2", &data.failed); ++ check ("test1", 2, all, f1line, "test1", &data.failed); ++ ++ printf ("%s: backtrace_full noinline\n", data.failed ? "FAIL" : "PASS"); ++ ++ if (data.failed) ++ ++failures; ++ ++ return failures; ++} ++ ++/* Test the backtrace function with inlined functions. 
*/ ++ ++static inline int test2 (void) __attribute__ ((always_inline, unused)); ++static inline int f12 (int) __attribute__ ((always_inline)); ++static inline int f13 (int, int) __attribute__ ((always_inline)); ++ ++static inline int ++test2 (void) ++{ ++ return f12 (__LINE__) + 1; ++} ++ ++static inline int ++f12 (int f1line) ++{ ++ return f13 (f1line, __LINE__) + 2; ++} ++ ++static inline int ++f13 (int f1line, int f2line) ++{ ++ struct info all[20]; ++ struct bdata data; ++ int f3line; ++ int i; ++ ++ data.all = &all[0]; ++ data.index = 0; ++ data.max = 20; ++ data.failed = 0; ++ ++ f3line = __LINE__ + 1; ++ i = backtrace_full (state, 0, callback_one, error_callback_one, &data); ++ ++ if (i != 0) ++ { ++ fprintf (stderr, "test2: unexpected return value %d\n", i); ++ data.failed = 1; ++ } ++ ++ check ("test2", 0, all, f3line, "f13", &data.failed); ++ check ("test2", 1, all, f2line, "f12", &data.failed); ++ check ("test2", 2, all, f1line, "test2", &data.failed); ++ ++ printf ("%s: backtrace_full inline\n", data.failed ? "FAIL" : "PASS"); ++ ++ if (data.failed) ++ ++failures; ++ ++ return failures; ++} ++ ++/* Test the backtrace_simple function with non-inlined functions. 
*/ ++ ++static int test3 (void) __attribute__ ((noinline, unused)); ++static int f22 (int) __attribute__ ((noinline)); ++static int f23 (int, int) __attribute__ ((noinline)); ++ ++static int ++test3 (void) ++{ ++ return f22 (__LINE__) + 1; ++} ++ ++static int ++f22 (int f1line) ++{ ++ return f23 (f1line, __LINE__) + 2; ++} ++ ++static int ++f23 (int f1line, int f2line) ++{ ++ uintptr_t addrs[20]; ++ struct sdata data; ++ int f3line; ++ int i; ++ ++ data.addrs = &addrs[0]; ++ data.index = 0; ++ data.max = 20; ++ data.failed = 0; ++ ++ f3line = __LINE__ + 1; ++ i = backtrace_simple (state, 0, callback_two, error_callback_two, &data); ++ ++ if (i != 0) ++ { ++ fprintf (stderr, "test3: unexpected return value %d\n", i); ++ data.failed = 1; ++ } ++ ++ if (!data.failed) ++ { ++ struct info all[20]; ++ struct bdata bdata; ++ int j; ++ ++ bdata.all = &all[0]; ++ bdata.index = 0; ++ bdata.max = 20; ++ bdata.failed = 0; ++ ++ for (j = 0; j < 3; ++j) ++ { ++ i = backtrace_pcinfo (state, addrs[j], callback_one, ++ error_callback_one, &bdata); ++ if (i != 0) ++ { ++ fprintf (stderr, ++ ("test3: unexpected return value " ++ "from backtrace_pcinfo %d\n"), ++ i); ++ bdata.failed = 1; ++ } ++ if (!bdata.failed && bdata.index != (size_t) (j + 1)) ++ { ++ fprintf (stderr, ++ ("wrong number of calls from backtrace_pcinfo " ++ "got %u expected %d\n"), ++ (unsigned int) bdata.index, j + 1); ++ bdata.failed = 1; ++ } ++ } ++ ++ check ("test3", 0, all, f3line, "f23", &bdata.failed); ++ check ("test3", 1, all, f2line, "f22", &bdata.failed); ++ check ("test3", 2, all, f1line, "test3", &bdata.failed); ++ ++ if (bdata.failed) ++ data.failed = 1; ++ ++ for (j = 0; j < 3; ++j) ++ { ++ struct symdata symdata; ++ ++ symdata.name = NULL; ++ symdata.val = 0; ++ symdata.size = 0; ++ symdata.failed = 0; ++ ++ i = backtrace_syminfo (state, addrs[j], callback_three, ++ error_callback_three, &symdata); ++ if (i == 0) ++ { ++ fprintf (stderr, ++ ("test3: [%d]: unexpected return value " ++ "from 
backtrace_syminfo %d\n"), ++ j, i); ++ symdata.failed = 1; ++ } ++ ++ if (!symdata.failed) ++ { ++ const char *expected; ++ ++ switch (j) ++ { ++ case 0: ++ expected = "f23"; ++ break; ++ case 1: ++ expected = "f22"; ++ break; ++ case 2: ++ expected = "test3"; ++ break; ++ default: ++ assert (0); ++ } ++ ++ if (symdata.name == NULL) ++ { ++ fprintf (stderr, "test3: [%d]: NULL syminfo name\n", j); ++ symdata.failed = 1; ++ } ++ /* Use strncmp, not strcmp, because GCC might create a ++ clone. */ ++ else if (strncmp (symdata.name, expected, strlen (expected)) ++ != 0) ++ { ++ fprintf (stderr, ++ ("test3: [%d]: unexpected syminfo name " ++ "got %s expected %s\n"), ++ j, symdata.name, expected); ++ symdata.failed = 1; ++ } ++ } ++ ++ if (symdata.failed) ++ data.failed = 1; ++ } ++ } ++ ++ printf ("%s: backtrace_simple noinline\n", data.failed ? "FAIL" : "PASS"); ++ ++ if (data.failed) ++ ++failures; ++ ++ return failures; ++} ++ ++/* Test the backtrace_simple function with inlined functions. 
*/ ++ ++static inline int test4 (void) __attribute__ ((always_inline, unused)); ++static inline int f32 (int) __attribute__ ((always_inline)); ++static inline int f33 (int, int) __attribute__ ((always_inline)); ++ ++static inline int ++test4 (void) ++{ ++ return f32 (__LINE__) + 1; ++} ++ ++static inline int ++f32 (int f1line) ++{ ++ return f33 (f1line, __LINE__) + 2; ++} ++ ++static inline int ++f33 (int f1line, int f2line) ++{ ++ uintptr_t addrs[20]; ++ struct sdata data; ++ int f3line; ++ int i; ++ ++ data.addrs = &addrs[0]; ++ data.index = 0; ++ data.max = 20; ++ data.failed = 0; ++ ++ f3line = __LINE__ + 1; ++ i = backtrace_simple (state, 0, callback_two, error_callback_two, &data); ++ ++ if (i != 0) ++ { ++ fprintf (stderr, "test3: unexpected return value %d\n", i); ++ data.failed = 1; ++ } ++ ++ if (!data.failed) ++ { ++ struct info all[20]; ++ struct bdata bdata; ++ ++ bdata.all = &all[0]; ++ bdata.index = 0; ++ bdata.max = 20; ++ bdata.failed = 0; ++ ++ i = backtrace_pcinfo (state, addrs[0], callback_one, error_callback_one, ++ &bdata); ++ if (i != 0) ++ { ++ fprintf (stderr, ++ ("test4: unexpected return value " ++ "from backtrace_pcinfo %d\n"), ++ i); ++ bdata.failed = 1; ++ } ++ ++ check ("test4", 0, all, f3line, "f33", &bdata.failed); ++ check ("test4", 1, all, f2line, "f32", &bdata.failed); ++ check ("test4", 2, all, f1line, "test4", &bdata.failed); ++ ++ if (bdata.failed) ++ data.failed = 1; ++ } ++ ++ printf ("%s: backtrace_simple inline\n", data.failed ? 
"FAIL" : "PASS"); ++ ++ if (data.failed) ++ ++failures; ++ ++ return failures; ++} ++ ++#if BACKTRACE_SUPPORTS_DATA ++ ++int global = 1; ++ ++static int ++test5 (void) ++{ ++ struct symdata symdata; ++ int i; ++ uintptr_t addr = (uintptr_t) &global; ++ ++ if (sizeof (global) > 1) ++ addr += 1; ++ ++ symdata.name = NULL; ++ symdata.val = 0; ++ symdata.size = 0; ++ symdata.failed = 0; ++ ++ i = backtrace_syminfo (state, addr, callback_three, ++ error_callback_three, &symdata); ++ if (i == 0) ++ { ++ fprintf (stderr, ++ "test5: unexpected return value from backtrace_syminfo %d\n", ++ i); ++ symdata.failed = 1; ++ } ++ ++ if (!symdata.failed) ++ { ++ if (symdata.name == NULL) ++ { ++ fprintf (stderr, "test5: NULL syminfo name\n"); ++ symdata.failed = 1; ++ } ++ else if (strcmp (symdata.name, "global") != 0) ++ { ++ fprintf (stderr, ++ "test5: unexpected syminfo name got %s expected %s\n", ++ symdata.name, "global"); ++ symdata.failed = 1; ++ } ++ else if (symdata.val != (uintptr_t) &global) ++ { ++ fprintf (stderr, ++ "test5: unexpected syminfo value got %lx expected %lx\n", ++ (unsigned long) symdata.val, ++ (unsigned long) (uintptr_t) &global); ++ symdata.failed = 1; ++ } ++ else if (symdata.size != sizeof (global)) ++ { ++ fprintf (stderr, ++ "test5: unexpected syminfo size got %lx expected %lx\n", ++ (unsigned long) symdata.size, ++ (unsigned long) sizeof (global)); ++ symdata.failed = 1; ++ } ++ } ++ ++ printf ("%s: backtrace_syminfo variable\n", ++ symdata.failed ? "FAIL" : "PASS"); ++ ++ if (symdata.failed) ++ ++failures; ++ ++ return failures; ++} ++ ++#endif /* BACKTRACE_SUPPORTS_DATA */ ++ ++static void ++error_callback_create (void *data ATTRIBUTE_UNUSED, const char *msg, ++ int errnum) ++{ ++ fprintf (stderr, "%s", msg); ++ if (errnum > 0) ++ fprintf (stderr, ": %s", strerror (errnum)); ++ fprintf (stderr, "\n"); ++ exit (EXIT_FAILURE); ++} ++ ++/* Run all the tests. 
*/ ++ ++int ++main (int argc ATTRIBUTE_UNUSED, char **argv) ++{ ++ state = backtrace_create_state (argv[0], BACKTRACE_SUPPORTS_THREADS, ++ error_callback_create, NULL); ++ ++#if BACKTRACE_SUPPORTED ++ test1 (); ++ test2 (); ++ test3 (); ++ test4 (); ++#if BACKTRACE_SUPPORTS_DATA ++ test5 (); ++#endif ++#endif ++ ++ exit (failures ? EXIT_FAILURE : EXIT_SUCCESS); ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/config.guess index 000000000,000000000..2e9ad7fe8 new file mode 100755 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/config.guess @@@ -1,0 -1,0 +1,1462 @@@ ++#! /bin/sh ++# Attempt to guess a canonical system name. ++# Copyright 1992-2016 Free Software Foundation, Inc. ++ ++timestamp='2016-10-02' ++ ++# This file is free software; you can redistribute it and/or modify it ++# under the terms of the GNU General Public License as published by ++# the Free Software Foundation; either version 3 of the License, or ++# (at your option) any later version. ++# ++# This program is distributed in the hope that it will be useful, but ++# WITHOUT ANY WARRANTY; without even the implied warranty of ++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++# General Public License for more details. ++# ++# You should have received a copy of the GNU General Public License ++# along with this program; if not, see . ++# ++# As a special exception to the GNU General Public License, if you ++# distribute this file as part of a program that contains a ++# configuration script generated by Autoconf, you may include it under ++# the same distribution terms that you use for the rest of that ++# program. This Exception is an additional permission under section 7 ++# of the GNU General Public License, version 3 ("GPLv3"). ++# ++# Originally written by Per Bothner; maintained since 2000 by Ben Elliston. 
++# ++# You can get the latest version of this script from: ++# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess ++# ++# Please send patches to . ++ ++ ++me=`echo "$0" | sed -e 's,.*/,,'` ++ ++usage="\ ++Usage: $0 [OPTION] ++ ++Output the configuration name of the system \`$me' is run on. ++ ++Operation modes: ++ -h, --help print this help, then exit ++ -t, --time-stamp print date of last modification, then exit ++ -v, --version print version number, then exit ++ ++Report bugs and patches to ." ++ ++version="\ ++GNU config.guess ($timestamp) ++ ++Originally written by Per Bothner. ++Copyright 1992-2016 Free Software Foundation, Inc. ++ ++This is free software; see the source for copying conditions. There is NO ++warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." ++ ++help=" ++Try \`$me --help' for more information." ++ ++# Parse command line ++while test $# -gt 0 ; do ++ case $1 in ++ --time-stamp | --time* | -t ) ++ echo "$timestamp" ; exit ;; ++ --version | -v ) ++ echo "$version" ; exit ;; ++ --help | --h* | -h ) ++ echo "$usage"; exit ;; ++ -- ) # Stop option processing ++ shift; break ;; ++ - ) # Use stdin as input. ++ break ;; ++ -* ) ++ echo "$me: invalid option $1$help" >&2 ++ exit 1 ;; ++ * ) ++ break ;; ++ esac ++done ++ ++if test $# != 0; then ++ echo "$me: too many arguments$help" >&2 ++ exit 1 ++fi ++ ++trap 'exit 1' 1 2 15 ++ ++# CC_FOR_BUILD -- compiler used by this script. Note that the use of a ++# compiler to aid in system detection is discouraged as it requires ++# temporary files to be created and, as you can see below, it is a ++# headache to deal with in a portable fashion. ++ ++# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still ++# use `HOST_CC' if defined, but it is deprecated. ++ ++# Portable tmp directory creation inspired by the Autoconf team. 
++ ++set_cc_for_build=' ++trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; ++trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; ++: ${TMPDIR=/tmp} ; ++ { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || ++ { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || ++ { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || ++ { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; ++dummy=$tmp/dummy ; ++tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; ++case $CC_FOR_BUILD,$HOST_CC,$CC in ++ ,,) echo "int x;" > $dummy.c ; ++ for c in cc gcc c89 c99 ; do ++ if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then ++ CC_FOR_BUILD="$c"; break ; ++ fi ; ++ done ; ++ if test x"$CC_FOR_BUILD" = x ; then ++ CC_FOR_BUILD=no_compiler_found ; ++ fi ++ ;; ++ ,,*) CC_FOR_BUILD=$CC ;; ++ ,*,*) CC_FOR_BUILD=$HOST_CC ;; ++esac ; set_cc_for_build= ;' ++ ++# This is needed to find uname on a Pyramid OSx when run in the BSD universe. ++# (ghazi@noc.rutgers.edu 1994-08-24) ++if (test -f /.attbin/uname) >/dev/null 2>&1 ; then ++ PATH=$PATH:/.attbin ; export PATH ++fi ++ ++UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown ++UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown ++UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown ++UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown ++ ++case "${UNAME_SYSTEM}" in ++Linux|GNU|GNU/*) ++ # If the system lacks a compiler, then just pick glibc. ++ # We could probably try harder. 
++ LIBC=gnu ++ ++ eval $set_cc_for_build ++ cat <<-EOF > $dummy.c ++ #include ++ #if defined(__UCLIBC__) ++ LIBC=uclibc ++ #elif defined(__dietlibc__) ++ LIBC=dietlibc ++ #else ++ LIBC=gnu ++ #endif ++ EOF ++ eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC' | sed 's, ,,g'` ++ ;; ++esac ++ ++# Note: order is significant - the case branches are not exclusive. ++ ++case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in ++ *:NetBSD:*:*) ++ # NetBSD (nbsd) targets should (where applicable) match one or ++ # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*, ++ # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently ++ # switched to ELF, *-*-netbsd* would select the old ++ # object file format. This provides both forward ++ # compatibility and a consistent mechanism for selecting the ++ # object file format. ++ # ++ # Note: NetBSD doesn't particularly care about the vendor ++ # portion of the name. We always set it to "unknown". ++ sysctl="sysctl -n hw.machine_arch" ++ UNAME_MACHINE_ARCH=`(uname -p 2>/dev/null || \ ++ /sbin/$sysctl 2>/dev/null || \ ++ /usr/sbin/$sysctl 2>/dev/null || \ ++ echo unknown)` ++ case "${UNAME_MACHINE_ARCH}" in ++ armeb) machine=armeb-unknown ;; ++ arm*) machine=arm-unknown ;; ++ sh3el) machine=shl-unknown ;; ++ sh3eb) machine=sh-unknown ;; ++ sh5el) machine=sh5le-unknown ;; ++ earmv*) ++ arch=`echo ${UNAME_MACHINE_ARCH} | sed -e 's,^e\(armv[0-9]\).*$,\1,'` ++ endian=`echo ${UNAME_MACHINE_ARCH} | sed -ne 's,^.*\(eb\)$,\1,p'` ++ machine=${arch}${endian}-unknown ++ ;; ++ *) machine=${UNAME_MACHINE_ARCH}-unknown ;; ++ esac ++ # The Operating System including object format, if it has switched ++ # to ELF recently (or will in the future) and ABI. 
++ case "${UNAME_MACHINE_ARCH}" in ++ earm*) ++ os=netbsdelf ++ ;; ++ arm*|i386|m68k|ns32k|sh3*|sparc|vax) ++ eval $set_cc_for_build ++ if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ ++ | grep -q __ELF__ ++ then ++ # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). ++ # Return netbsd for either. FIX? ++ os=netbsd ++ else ++ os=netbsdelf ++ fi ++ ;; ++ *) ++ os=netbsd ++ ;; ++ esac ++ # Determine ABI tags. ++ case "${UNAME_MACHINE_ARCH}" in ++ earm*) ++ expr='s/^earmv[0-9]/-eabi/;s/eb$//' ++ abi=`echo ${UNAME_MACHINE_ARCH} | sed -e "$expr"` ++ ;; ++ esac ++ # The OS release ++ # Debian GNU/NetBSD machines have a different userland, and ++ # thus, need a distinct triplet. However, they do not need ++ # kernel version information, so it can be replaced with a ++ # suitable tag, in the style of linux-gnu. ++ case "${UNAME_VERSION}" in ++ Debian*) ++ release='-gnu' ++ ;; ++ *) ++ release=`echo ${UNAME_RELEASE} | sed -e 's/[-_].*//' | cut -d. -f1,2` ++ ;; ++ esac ++ # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: ++ # contains redundant information, the shorter form: ++ # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. 
++ echo "${machine}-${os}${release}${abi}" ++ exit ;; ++ *:Bitrig:*:*) ++ UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'` ++ echo ${UNAME_MACHINE_ARCH}-unknown-bitrig${UNAME_RELEASE} ++ exit ;; ++ *:OpenBSD:*:*) ++ UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` ++ echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} ++ exit ;; ++ *:LibertyBSD:*:*) ++ UNAME_MACHINE_ARCH=`arch | sed 's/^.*BSD\.//'` ++ echo ${UNAME_MACHINE_ARCH}-unknown-libertybsd${UNAME_RELEASE} ++ exit ;; ++ *:ekkoBSD:*:*) ++ echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE} ++ exit ;; ++ *:SolidBSD:*:*) ++ echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE} ++ exit ;; ++ macppc:MirBSD:*:*) ++ echo powerpc-unknown-mirbsd${UNAME_RELEASE} ++ exit ;; ++ *:MirBSD:*:*) ++ echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE} ++ exit ;; ++ *:Sortix:*:*) ++ echo ${UNAME_MACHINE}-unknown-sortix ++ exit ;; ++ alpha:OSF1:*:*) ++ case $UNAME_RELEASE in ++ *4.0) ++ UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` ++ ;; ++ *5.*) ++ UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` ++ ;; ++ esac ++ # According to Compaq, /usr/sbin/psrinfo has been available on ++ # OSF/1 and Tru64 systems produced since 1995. I hope that ++ # covers most systems running today. This code pipes the CPU ++ # types through head -n 1, so we only detect the type of CPU 0. 
++ ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` ++ case "$ALPHA_CPU_TYPE" in ++ "EV4 (21064)") ++ UNAME_MACHINE=alpha ;; ++ "EV4.5 (21064)") ++ UNAME_MACHINE=alpha ;; ++ "LCA4 (21066/21068)") ++ UNAME_MACHINE=alpha ;; ++ "EV5 (21164)") ++ UNAME_MACHINE=alphaev5 ;; ++ "EV5.6 (21164A)") ++ UNAME_MACHINE=alphaev56 ;; ++ "EV5.6 (21164PC)") ++ UNAME_MACHINE=alphapca56 ;; ++ "EV5.7 (21164PC)") ++ UNAME_MACHINE=alphapca57 ;; ++ "EV6 (21264)") ++ UNAME_MACHINE=alphaev6 ;; ++ "EV6.7 (21264A)") ++ UNAME_MACHINE=alphaev67 ;; ++ "EV6.8CB (21264C)") ++ UNAME_MACHINE=alphaev68 ;; ++ "EV6.8AL (21264B)") ++ UNAME_MACHINE=alphaev68 ;; ++ "EV6.8CX (21264D)") ++ UNAME_MACHINE=alphaev68 ;; ++ "EV6.9A (21264/EV69A)") ++ UNAME_MACHINE=alphaev69 ;; ++ "EV7 (21364)") ++ UNAME_MACHINE=alphaev7 ;; ++ "EV7.9 (21364A)") ++ UNAME_MACHINE=alphaev79 ;; ++ esac ++ # A Pn.n version is a patched version. ++ # A Vn.n version is a released version. ++ # A Tn.n version is a released field test version. ++ # A Xn.n version is an unreleased experimental baselevel. ++ # 1.2 uses "1.2" for uname -r. ++ echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz` ++ # Reset EXIT trap before exiting to avoid spurious non-zero exit code. ++ exitcode=$? ++ trap '' 0 ++ exit $exitcode ;; ++ Alpha\ *:Windows_NT*:*) ++ # How do we know it's Interix rather than the generic POSIX subsystem? ++ # Should we change UNAME_MACHINE based on the output of uname instead ++ # of the specific Alpha model? 
++ echo alpha-pc-interix ++ exit ;; ++ 21064:Windows_NT:50:3) ++ echo alpha-dec-winnt3.5 ++ exit ;; ++ Amiga*:UNIX_System_V:4.0:*) ++ echo m68k-unknown-sysv4 ++ exit ;; ++ *:[Aa]miga[Oo][Ss]:*:*) ++ echo ${UNAME_MACHINE}-unknown-amigaos ++ exit ;; ++ *:[Mm]orph[Oo][Ss]:*:*) ++ echo ${UNAME_MACHINE}-unknown-morphos ++ exit ;; ++ *:OS/390:*:*) ++ echo i370-ibm-openedition ++ exit ;; ++ *:z/VM:*:*) ++ echo s390-ibm-zvmoe ++ exit ;; ++ *:OS400:*:*) ++ echo powerpc-ibm-os400 ++ exit ;; ++ arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) ++ echo arm-acorn-riscix${UNAME_RELEASE} ++ exit ;; ++ arm*:riscos:*:*|arm*:RISCOS:*:*) ++ echo arm-unknown-riscos ++ exit ;; ++ SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) ++ echo hppa1.1-hitachi-hiuxmpp ++ exit ;; ++ Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) ++ # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. ++ if test "`(/bin/universe) 2>/dev/null`" = att ; then ++ echo pyramid-pyramid-sysv3 ++ else ++ echo pyramid-pyramid-bsd ++ fi ++ exit ;; ++ NILE*:*:*:dcosx) ++ echo pyramid-pyramid-svr4 ++ exit ;; ++ DRS?6000:unix:4.0:6*) ++ echo sparc-icl-nx6 ++ exit ;; ++ DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) ++ case `/usr/bin/uname -p` in ++ sparc) echo sparc-icl-nx7; exit ;; ++ esac ;; ++ s390x:SunOS:*:*) ++ echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` ++ exit ;; ++ sun4H:SunOS:5.*:*) ++ echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` ++ exit ;; ++ sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) ++ echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` ++ exit ;; ++ i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) ++ echo i386-pc-auroraux${UNAME_RELEASE} ++ exit ;; ++ i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) ++ eval $set_cc_for_build ++ SUN_ARCH=i386 ++ # If there is a compiler, see if it is configured for 64-bit objects. ++ # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. ++ # This test works for both compilers. 
++ if [ "$CC_FOR_BUILD" != no_compiler_found ]; then ++ if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ ++ (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ ++ grep IS_64BIT_ARCH >/dev/null ++ then ++ SUN_ARCH=x86_64 ++ fi ++ fi ++ echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` ++ exit ;; ++ sun4*:SunOS:6*:*) ++ # According to config.sub, this is the proper way to canonicalize ++ # SunOS6. Hard to guess exactly what SunOS6 will be like, but ++ # it's likely to be more like Solaris than SunOS4. ++ echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` ++ exit ;; ++ sun4*:SunOS:*:*) ++ case "`/usr/bin/arch -k`" in ++ Series*|S4*) ++ UNAME_RELEASE=`uname -v` ++ ;; ++ esac ++ # Japanese Language versions have a version number like `4.1.3-JL'. ++ echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` ++ exit ;; ++ sun3*:SunOS:*:*) ++ echo m68k-sun-sunos${UNAME_RELEASE} ++ exit ;; ++ sun*:*:4.2BSD:*) ++ UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` ++ test "x${UNAME_RELEASE}" = x && UNAME_RELEASE=3 ++ case "`/bin/arch`" in ++ sun3) ++ echo m68k-sun-sunos${UNAME_RELEASE} ++ ;; ++ sun4) ++ echo sparc-sun-sunos${UNAME_RELEASE} ++ ;; ++ esac ++ exit ;; ++ aushp:SunOS:*:*) ++ echo sparc-auspex-sunos${UNAME_RELEASE} ++ exit ;; ++ # The situation for MiNT is a little confusing. The machine name ++ # can be virtually everything (everything which is not ++ # "atarist" or "atariste" at least should have a processor ++ # > m68000). The system name ranges from "MiNT" over "FreeMiNT" ++ # to the lowercase version "mint" (or "freemint"). Finally ++ # the system name "TOS" denotes a system which is actually not ++ # MiNT. But MiNT is downward compatible to TOS, so this should ++ # be no problem. 
++ atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) ++ echo m68k-atari-mint${UNAME_RELEASE} ++ exit ;; ++ atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) ++ echo m68k-atari-mint${UNAME_RELEASE} ++ exit ;; ++ *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) ++ echo m68k-atari-mint${UNAME_RELEASE} ++ exit ;; ++ milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) ++ echo m68k-milan-mint${UNAME_RELEASE} ++ exit ;; ++ hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) ++ echo m68k-hades-mint${UNAME_RELEASE} ++ exit ;; ++ *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) ++ echo m68k-unknown-mint${UNAME_RELEASE} ++ exit ;; ++ m68k:machten:*:*) ++ echo m68k-apple-machten${UNAME_RELEASE} ++ exit ;; ++ powerpc:machten:*:*) ++ echo powerpc-apple-machten${UNAME_RELEASE} ++ exit ;; ++ RISC*:Mach:*:*) ++ echo mips-dec-mach_bsd4.3 ++ exit ;; ++ RISC*:ULTRIX:*:*) ++ echo mips-dec-ultrix${UNAME_RELEASE} ++ exit ;; ++ VAX*:ULTRIX*:*:*) ++ echo vax-dec-ultrix${UNAME_RELEASE} ++ exit ;; ++ 2020:CLIX:*:* | 2430:CLIX:*:*) ++ echo clipper-intergraph-clix${UNAME_RELEASE} ++ exit ;; ++ mips:*:*:UMIPS | mips:*:*:RISCos) ++ eval $set_cc_for_build ++ sed 's/^ //' << EOF >$dummy.c ++#ifdef __cplusplus ++#include /* for printf() prototype */ ++ int main (int argc, char *argv[]) { ++#else ++ int main (argc, argv) int argc; char *argv[]; { ++#endif ++ #if defined (host_mips) && defined (MIPSEB) ++ #if defined (SYSTYPE_SYSV) ++ printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); ++ #endif ++ #if defined (SYSTYPE_SVR4) ++ printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); ++ #endif ++ #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) ++ printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); ++ #endif ++ #endif ++ exit (-1); ++ } ++EOF ++ $CC_FOR_BUILD -o $dummy $dummy.c && ++ dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` && ++ SYSTEM_NAME=`$dummy $dummyarg` && ++ { echo "$SYSTEM_NAME"; exit; } ++ echo 
mips-mips-riscos${UNAME_RELEASE} ++ exit ;; ++ Motorola:PowerMAX_OS:*:*) ++ echo powerpc-motorola-powermax ++ exit ;; ++ Motorola:*:4.3:PL8-*) ++ echo powerpc-harris-powermax ++ exit ;; ++ Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) ++ echo powerpc-harris-powermax ++ exit ;; ++ Night_Hawk:Power_UNIX:*:*) ++ echo powerpc-harris-powerunix ++ exit ;; ++ m88k:CX/UX:7*:*) ++ echo m88k-harris-cxux7 ++ exit ;; ++ m88k:*:4*:R4*) ++ echo m88k-motorola-sysv4 ++ exit ;; ++ m88k:*:3*:R3*) ++ echo m88k-motorola-sysv3 ++ exit ;; ++ AViiON:dgux:*:*) ++ # DG/UX returns AViiON for all architectures ++ UNAME_PROCESSOR=`/usr/bin/uname -p` ++ if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] ++ then ++ if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ ++ [ ${TARGET_BINARY_INTERFACE}x = x ] ++ then ++ echo m88k-dg-dgux${UNAME_RELEASE} ++ else ++ echo m88k-dg-dguxbcs${UNAME_RELEASE} ++ fi ++ else ++ echo i586-dg-dgux${UNAME_RELEASE} ++ fi ++ exit ;; ++ M88*:DolphinOS:*:*) # DolphinOS (SVR3) ++ echo m88k-dolphin-sysv3 ++ exit ;; ++ M88*:*:R3*:*) ++ # Delta 88k system running SVR3 ++ echo m88k-motorola-sysv3 ++ exit ;; ++ XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) ++ echo m88k-tektronix-sysv3 ++ exit ;; ++ Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) ++ echo m68k-tektronix-bsd ++ exit ;; ++ *:IRIX*:*:*) ++ echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` ++ exit ;; ++ ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. 
++ echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id ++ exit ;; # Note that: echo "'`uname -s`'" gives 'AIX ' ++ i*86:AIX:*:*) ++ echo i386-ibm-aix ++ exit ;; ++ ia64:AIX:*:*) ++ if [ -x /usr/bin/oslevel ] ; then ++ IBM_REV=`/usr/bin/oslevel` ++ else ++ IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} ++ fi ++ echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} ++ exit ;; ++ *:AIX:2:3) ++ if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then ++ eval $set_cc_for_build ++ sed 's/^ //' << EOF >$dummy.c ++ #include ++ ++ main() ++ { ++ if (!__power_pc()) ++ exit(1); ++ puts("powerpc-ibm-aix3.2.5"); ++ exit(0); ++ } ++EOF ++ if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` ++ then ++ echo "$SYSTEM_NAME" ++ else ++ echo rs6000-ibm-aix3.2.5 ++ fi ++ elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then ++ echo rs6000-ibm-aix3.2.4 ++ else ++ echo rs6000-ibm-aix3.2 ++ fi ++ exit ;; ++ *:AIX:*:[4567]) ++ IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` ++ if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then ++ IBM_ARCH=rs6000 ++ else ++ IBM_ARCH=powerpc ++ fi ++ if [ -x /usr/bin/lslpp ] ; then ++ IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc | ++ awk -F: '{ print $3 }' | sed s/[0-9]*$/0/` ++ else ++ IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} ++ fi ++ echo ${IBM_ARCH}-ibm-aix${IBM_REV} ++ exit ;; ++ *:AIX:*:*) ++ echo rs6000-ibm-aix ++ exit ;; ++ ibmrt:4.4BSD:*|romp-ibm:BSD:*) ++ echo romp-ibm-bsd4.4 ++ exit ;; ++ ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and ++ echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to ++ exit ;; # report: romp-ibm BSD 4.3 ++ *:BOSX:*:*) ++ echo rs6000-bull-bosx ++ exit ;; ++ DPX/2?00:B.O.S.:*:*) ++ echo m68k-bull-sysv3 ++ exit ;; ++ 9000/[34]??:4.3bsd:1.*:*) ++ echo m68k-hp-bsd ++ exit ;; ++ hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) ++ echo m68k-hp-bsd4.4 ++ exit ;; ++ 9000/[34678]??:HP-UX:*:*) ++ HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` ++ case 
"${UNAME_MACHINE}" in ++ 9000/31? ) HP_ARCH=m68000 ;; ++ 9000/[34]?? ) HP_ARCH=m68k ;; ++ 9000/[678][0-9][0-9]) ++ if [ -x /usr/bin/getconf ]; then ++ sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` ++ sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` ++ case "${sc_cpu_version}" in ++ 523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0 ++ 528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1 ++ 532) # CPU_PA_RISC2_0 ++ case "${sc_kernel_bits}" in ++ 32) HP_ARCH=hppa2.0n ;; ++ 64) HP_ARCH=hppa2.0w ;; ++ '') HP_ARCH=hppa2.0 ;; # HP-UX 10.20 ++ esac ;; ++ esac ++ fi ++ if [ "${HP_ARCH}" = "" ]; then ++ eval $set_cc_for_build ++ sed 's/^ //' << EOF >$dummy.c ++ ++ #define _HPUX_SOURCE ++ #include ++ #include ++ ++ int main () ++ { ++ #if defined(_SC_KERNEL_BITS) ++ long bits = sysconf(_SC_KERNEL_BITS); ++ #endif ++ long cpu = sysconf (_SC_CPU_VERSION); ++ ++ switch (cpu) ++ { ++ case CPU_PA_RISC1_0: puts ("hppa1.0"); break; ++ case CPU_PA_RISC1_1: puts ("hppa1.1"); break; ++ case CPU_PA_RISC2_0: ++ #if defined(_SC_KERNEL_BITS) ++ switch (bits) ++ { ++ case 64: puts ("hppa2.0w"); break; ++ case 32: puts ("hppa2.0n"); break; ++ default: puts ("hppa2.0"); break; ++ } break; ++ #else /* !defined(_SC_KERNEL_BITS) */ ++ puts ("hppa2.0"); break; ++ #endif ++ default: puts ("hppa1.0"); break; ++ } ++ exit (0); ++ } ++EOF ++ (CCOPTS="" $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` ++ test -z "$HP_ARCH" && HP_ARCH=hppa ++ fi ;; ++ esac ++ if [ ${HP_ARCH} = hppa2.0w ] ++ then ++ eval $set_cc_for_build ++ ++ # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating ++ # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler ++ # generating 64-bit code. 
GNU and HP use different nomenclature: ++ # ++ # $ CC_FOR_BUILD=cc ./config.guess ++ # => hppa2.0w-hp-hpux11.23 ++ # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess ++ # => hppa64-hp-hpux11.23 ++ ++ if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | ++ grep -q __LP64__ ++ then ++ HP_ARCH=hppa2.0w ++ else ++ HP_ARCH=hppa64 ++ fi ++ fi ++ echo ${HP_ARCH}-hp-hpux${HPUX_REV} ++ exit ;; ++ ia64:HP-UX:*:*) ++ HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` ++ echo ia64-hp-hpux${HPUX_REV} ++ exit ;; ++ 3050*:HI-UX:*:*) ++ eval $set_cc_for_build ++ sed 's/^ //' << EOF >$dummy.c ++ #include ++ int ++ main () ++ { ++ long cpu = sysconf (_SC_CPU_VERSION); ++ /* The order matters, because CPU_IS_HP_MC68K erroneously returns ++ true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct ++ results, however. */ ++ if (CPU_IS_PA_RISC (cpu)) ++ { ++ switch (cpu) ++ { ++ case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; ++ case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; ++ case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; ++ default: puts ("hppa-hitachi-hiuxwe2"); break; ++ } ++ } ++ else if (CPU_IS_HP_MC68K (cpu)) ++ puts ("m68k-hitachi-hiuxwe2"); ++ else puts ("unknown-hitachi-hiuxwe2"); ++ exit (0); ++ } ++EOF ++ $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` && ++ { echo "$SYSTEM_NAME"; exit; } ++ echo unknown-hitachi-hiuxwe2 ++ exit ;; ++ 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) ++ echo hppa1.1-hp-bsd ++ exit ;; ++ 9000/8??:4.3bsd:*:*) ++ echo hppa1.0-hp-bsd ++ exit ;; ++ *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) ++ echo hppa1.0-hp-mpeix ++ exit ;; ++ hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) ++ echo hppa1.1-hp-osf ++ exit ;; ++ hp8??:OSF1:*:*) ++ echo hppa1.0-hp-osf ++ exit ;; ++ i*86:OSF1:*:*) ++ if [ -x /usr/sbin/sysversion ] ; then ++ echo ${UNAME_MACHINE}-unknown-osf1mk ++ else ++ echo ${UNAME_MACHINE}-unknown-osf1 ++ fi ++ exit ;; ++ parisc*:Lites*:*:*) ++ echo hppa1.1-hp-lites ++ exit ;; ++ C1*:ConvexOS:*:* | 
convex:ConvexOS:C1*:*) ++ echo c1-convex-bsd ++ exit ;; ++ C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*) ++ if getsysinfo -f scalar_acc ++ then echo c32-convex-bsd ++ else echo c2-convex-bsd ++ fi ++ exit ;; ++ C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) ++ echo c34-convex-bsd ++ exit ;; ++ C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) ++ echo c38-convex-bsd ++ exit ;; ++ C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) ++ echo c4-convex-bsd ++ exit ;; ++ CRAY*Y-MP:*:*:*) ++ echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' ++ exit ;; ++ CRAY*[A-Z]90:*:*:*) ++ echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ ++ | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ ++ -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ ++ -e 's/\.[^.]*$/.X/' ++ exit ;; ++ CRAY*TS:*:*:*) ++ echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' ++ exit ;; ++ CRAY*T3E:*:*:*) ++ echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' ++ exit ;; ++ CRAY*SV1:*:*:*) ++ echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' ++ exit ;; ++ *:UNICOS/mp:*:*) ++ echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' ++ exit ;; ++ F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) ++ FUJITSU_PROC=`uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz` ++ FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` ++ FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` ++ echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" ++ exit ;; ++ 5000:UNIX_System_V:4.*:*) ++ FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` ++ FUJITSU_REL=`echo ${UNAME_RELEASE} | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/'` ++ echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" ++ exit ;; ++ i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) ++ echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} ++ exit ;; ++ sparc*:BSD/OS:*:*) ++ echo 
sparc-unknown-bsdi${UNAME_RELEASE} ++ exit ;; ++ *:BSD/OS:*:*) ++ echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} ++ exit ;; ++ *:FreeBSD:*:*) ++ UNAME_PROCESSOR=`/usr/bin/uname -p` ++ case ${UNAME_PROCESSOR} in ++ amd64) ++ echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; ++ *) ++ echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; ++ esac ++ exit ;; ++ i*:CYGWIN*:*) ++ echo ${UNAME_MACHINE}-pc-cygwin ++ exit ;; ++ *:MINGW64*:*) ++ echo ${UNAME_MACHINE}-pc-mingw64 ++ exit ;; ++ *:MINGW*:*) ++ echo ${UNAME_MACHINE}-pc-mingw32 ++ exit ;; ++ *:MSYS*:*) ++ echo ${UNAME_MACHINE}-pc-msys ++ exit ;; ++ i*:windows32*:*) ++ # uname -m includes "-pc" on this system. ++ echo ${UNAME_MACHINE}-mingw32 ++ exit ;; ++ i*:PW*:*) ++ echo ${UNAME_MACHINE}-pc-pw32 ++ exit ;; ++ *:Interix*:*) ++ case ${UNAME_MACHINE} in ++ x86) ++ echo i586-pc-interix${UNAME_RELEASE} ++ exit ;; ++ authenticamd | genuineintel | EM64T) ++ echo x86_64-unknown-interix${UNAME_RELEASE} ++ exit ;; ++ IA64) ++ echo ia64-unknown-interix${UNAME_RELEASE} ++ exit ;; ++ esac ;; ++ [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) ++ echo i${UNAME_MACHINE}-pc-mks ++ exit ;; ++ 8664:Windows_NT:*) ++ echo x86_64-pc-mks ++ exit ;; ++ i*:Windows_NT*:* | Pentium*:Windows_NT*:*) ++ # How do we know it's Interix rather than the generic POSIX subsystem? ++ # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we ++ # UNAME_MACHINE based on the output of uname instead of i386? 
++ echo i586-pc-interix ++ exit ;; ++ i*:UWIN*:*) ++ echo ${UNAME_MACHINE}-pc-uwin ++ exit ;; ++ amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) ++ echo x86_64-unknown-cygwin ++ exit ;; ++ p*:CYGWIN*:*) ++ echo powerpcle-unknown-cygwin ++ exit ;; ++ prep*:SunOS:5.*:*) ++ echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` ++ exit ;; ++ *:GNU:*:*) ++ # the GNU system ++ echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-${LIBC}`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` ++ exit ;; ++ *:GNU/*:*:*) ++ # other systems with GNU libc and userland ++ echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]"``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-${LIBC} ++ exit ;; ++ i*86:Minix:*:*) ++ echo ${UNAME_MACHINE}-pc-minix ++ exit ;; ++ aarch64:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ aarch64_be:Linux:*:*) ++ UNAME_MACHINE=aarch64_be ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ alpha:Linux:*:*) ++ case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in ++ EV5) UNAME_MACHINE=alphaev5 ;; ++ EV56) UNAME_MACHINE=alphaev56 ;; ++ PCA56) UNAME_MACHINE=alphapca56 ;; ++ PCA57) UNAME_MACHINE=alphapca56 ;; ++ EV6) UNAME_MACHINE=alphaev6 ;; ++ EV67) UNAME_MACHINE=alphaev67 ;; ++ EV68*) UNAME_MACHINE=alphaev68 ;; ++ esac ++ objdump --private-headers /bin/sh | grep -q ld.so.1 ++ if test "$?" 
= 0 ; then LIBC=gnulibc1 ; fi ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ arc:Linux:*:* | arceb:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ arm*:Linux:*:*) ++ eval $set_cc_for_build ++ if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ ++ | grep -q __ARM_EABI__ ++ then ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ else ++ if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ ++ | grep -q __ARM_PCS_VFP ++ then ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabi ++ else ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabihf ++ fi ++ fi ++ exit ;; ++ avr32*:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ cris:Linux:*:*) ++ echo ${UNAME_MACHINE}-axis-linux-${LIBC} ++ exit ;; ++ crisv32:Linux:*:*) ++ echo ${UNAME_MACHINE}-axis-linux-${LIBC} ++ exit ;; ++ e2k:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ frv:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ hexagon:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ i*86:Linux:*:*) ++ echo ${UNAME_MACHINE}-pc-linux-${LIBC} ++ exit ;; ++ ia64:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ k1om:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ m32r*:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ m68*:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ mips:Linux:*:* | mips64:Linux:*:*) ++ eval $set_cc_for_build ++ sed 's/^ //' << EOF >$dummy.c ++ #undef CPU ++ #undef ${UNAME_MACHINE} ++ #undef ${UNAME_MACHINE}el ++ #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) ++ CPU=${UNAME_MACHINE}el ++ #else ++ #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) ++ CPU=${UNAME_MACHINE} ++ #else ++ CPU= ++ #endif ++ #endif ++EOF ++ eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'` ++ test x"${CPU}" != x && { echo 
"${CPU}-unknown-linux-${LIBC}"; exit; } ++ ;; ++ mips64el:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ openrisc*:Linux:*:*) ++ echo or1k-unknown-linux-${LIBC} ++ exit ;; ++ or32:Linux:*:* | or1k*:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ padre:Linux:*:*) ++ echo sparc-unknown-linux-${LIBC} ++ exit ;; ++ parisc64:Linux:*:* | hppa64:Linux:*:*) ++ echo hppa64-unknown-linux-${LIBC} ++ exit ;; ++ parisc:Linux:*:* | hppa:Linux:*:*) ++ # Look for CPU level ++ case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in ++ PA7*) echo hppa1.1-unknown-linux-${LIBC} ;; ++ PA8*) echo hppa2.0-unknown-linux-${LIBC} ;; ++ *) echo hppa-unknown-linux-${LIBC} ;; ++ esac ++ exit ;; ++ ppc64:Linux:*:*) ++ echo powerpc64-unknown-linux-${LIBC} ++ exit ;; ++ ppc:Linux:*:*) ++ echo powerpc-unknown-linux-${LIBC} ++ exit ;; ++ ppc64le:Linux:*:*) ++ echo powerpc64le-unknown-linux-${LIBC} ++ exit ;; ++ ppcle:Linux:*:*) ++ echo powerpcle-unknown-linux-${LIBC} ++ exit ;; ++ riscv32:Linux:*:* | riscv64:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ s390:Linux:*:* | s390x:Linux:*:*) ++ echo ${UNAME_MACHINE}-ibm-linux-${LIBC} ++ exit ;; ++ sh64*:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ sh*:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ sparc:Linux:*:* | sparc64:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ tile*:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ vax:Linux:*:*) ++ echo ${UNAME_MACHINE}-dec-linux-${LIBC} ++ exit ;; ++ x86_64:Linux:*:*) ++ echo ${UNAME_MACHINE}-pc-linux-${LIBC} ++ exit ;; ++ xtensa*:Linux:*:*) ++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC} ++ exit ;; ++ i*86:DYNIX/ptx:4*:*) ++ # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. ++ # earlier versions are messed up and put the nodename in both ++ # sysname and nodename. 
++ echo i386-sequent-sysv4 ++ exit ;; ++ i*86:UNIX_SV:4.2MP:2.*) ++ # Unixware is an offshoot of SVR4, but it has its own version ++ # number series starting with 2... ++ # I am not positive that other SVR4 systems won't match this, ++ # I just have to hope. -- rms. ++ # Use sysv4.2uw... so that sysv4* matches it. ++ echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} ++ exit ;; ++ i*86:OS/2:*:*) ++ # If we were able to find `uname', then EMX Unix compatibility ++ # is probably installed. ++ echo ${UNAME_MACHINE}-pc-os2-emx ++ exit ;; ++ i*86:XTS-300:*:STOP) ++ echo ${UNAME_MACHINE}-unknown-stop ++ exit ;; ++ i*86:atheos:*:*) ++ echo ${UNAME_MACHINE}-unknown-atheos ++ exit ;; ++ i*86:syllable:*:*) ++ echo ${UNAME_MACHINE}-pc-syllable ++ exit ;; ++ i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) ++ echo i386-unknown-lynxos${UNAME_RELEASE} ++ exit ;; ++ i*86:*DOS:*:*) ++ echo ${UNAME_MACHINE}-pc-msdosdjgpp ++ exit ;; ++ i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) ++ UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` ++ if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then ++ echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} ++ else ++ echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} ++ fi ++ exit ;; ++ i*86:*:5:[678]*) ++ # UnixWare 7.x, OpenUNIX and OpenServer 6. 
++ case `/bin/uname -X | grep "^Machine"` in ++ *486*) UNAME_MACHINE=i486 ;; ++ *Pentium) UNAME_MACHINE=i586 ;; ++ *Pent*|*Celeron) UNAME_MACHINE=i686 ;; ++ esac ++ echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} ++ exit ;; ++ i*86:*:3.2:*) ++ if test -f /usr/options/cb.name; then ++ UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then ++ UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` ++ (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 ++ (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ ++ && UNAME_MACHINE=i586 ++ (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ ++ && UNAME_MACHINE=i686 ++ (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ ++ && UNAME_MACHINE=i686 ++ echo ${UNAME_MACHINE}-pc-sco$UNAME_REL ++ else ++ echo ${UNAME_MACHINE}-pc-sysv32 ++ fi ++ exit ;; ++ pc:*:*:*) ++ # Left here for compatibility: ++ # uname -m prints for DJGPP always 'pc', but it prints nothing about ++ # the processor, so we play safe by assuming i586. ++ # Note: whatever this is, it MUST be the same as what config.sub ++ # prints for the "djgpp" host, or else GDB configure will decide that ++ # this is a cross-build. ++ echo i586-pc-msdosdjgpp ++ exit ;; ++ Intel:Mach:3*:*) ++ echo i386-pc-mach3 ++ exit ;; ++ paragon:*:*:*) ++ echo i860-intel-osf1 ++ exit ;; ++ i860:*:4.*:*) # i860-SVR4 ++ if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then ++ echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 ++ else # Add other i860-SVR4 vendors below as they are discovered. 
++ echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 ++ fi ++ exit ;; ++ mini*:CTIX:SYS*5:*) ++ # "miniframe" ++ echo m68010-convergent-sysv ++ exit ;; ++ mc68k:UNIX:SYSTEM5:3.51m) ++ echo m68k-convergent-sysv ++ exit ;; ++ M680?0:D-NIX:5.3:*) ++ echo m68k-diab-dnix ++ exit ;; ++ M68*:*:R3V[5678]*:*) ++ test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; ++ 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) ++ OS_REL='' ++ test -r /etc/.relid \ ++ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` ++ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ ++ && { echo i486-ncr-sysv4.3${OS_REL}; exit; } ++ /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ ++ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; ++ 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) ++ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ ++ && { echo i486-ncr-sysv4; exit; } ;; ++ NCR*:*:4.2:* | MPRAS*:*:4.2:*) ++ OS_REL='.3' ++ test -r /etc/.relid \ ++ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` ++ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ ++ && { echo i486-ncr-sysv4.3${OS_REL}; exit; } ++ /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ ++ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ++ /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ ++ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; ++ m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) ++ echo m68k-unknown-lynxos${UNAME_RELEASE} ++ exit ;; ++ mc68030:UNIX_System_V:4.*:*) ++ echo m68k-atari-sysv4 ++ exit ;; ++ TSUNAMI:LynxOS:2.*:*) ++ echo sparc-unknown-lynxos${UNAME_RELEASE} ++ exit ;; ++ rs6000:LynxOS:2.*:*) ++ echo rs6000-unknown-lynxos${UNAME_RELEASE} ++ exit ;; ++ PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) ++ echo powerpc-unknown-lynxos${UNAME_RELEASE} ++ exit ;; ++ SM[BE]S:UNIX_SV:*:*) ++ echo 
mips-dde-sysv${UNAME_RELEASE} ++ exit ;; ++ RM*:ReliantUNIX-*:*:*) ++ echo mips-sni-sysv4 ++ exit ;; ++ RM*:SINIX-*:*:*) ++ echo mips-sni-sysv4 ++ exit ;; ++ *:SINIX-*:*:*) ++ if uname -p 2>/dev/null >/dev/null ; then ++ UNAME_MACHINE=`(uname -p) 2>/dev/null` ++ echo ${UNAME_MACHINE}-sni-sysv4 ++ else ++ echo ns32k-sni-sysv ++ fi ++ exit ;; ++ PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort ++ # says ++ echo i586-unisys-sysv4 ++ exit ;; ++ *:UNIX_System_V:4*:FTX*) ++ # From Gerald Hewes . ++ # How about differentiating between stratus architectures? -djm ++ echo hppa1.1-stratus-sysv4 ++ exit ;; ++ *:*:*:FTX*) ++ # From seanf@swdc.stratus.com. ++ echo i860-stratus-sysv4 ++ exit ;; ++ i*86:VOS:*:*) ++ # From Paul.Green@stratus.com. ++ echo ${UNAME_MACHINE}-stratus-vos ++ exit ;; ++ *:VOS:*:*) ++ # From Paul.Green@stratus.com. ++ echo hppa1.1-stratus-vos ++ exit ;; ++ mc68*:A/UX:*:*) ++ echo m68k-apple-aux${UNAME_RELEASE} ++ exit ;; ++ news*:NEWS-OS:6*:*) ++ echo mips-sony-newsos6 ++ exit ;; ++ R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) ++ if [ -d /usr/nec ]; then ++ echo mips-nec-sysv${UNAME_RELEASE} ++ else ++ echo mips-unknown-sysv${UNAME_RELEASE} ++ fi ++ exit ;; ++ BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. ++ echo powerpc-be-beos ++ exit ;; ++ BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. ++ echo powerpc-apple-beos ++ exit ;; ++ BePC:BeOS:*:*) # BeOS running on Intel PC compatible. ++ echo i586-pc-beos ++ exit ;; ++ BePC:Haiku:*:*) # Haiku running on Intel PC compatible. 
++ echo i586-pc-haiku ++ exit ;; ++ x86_64:Haiku:*:*) ++ echo x86_64-unknown-haiku ++ exit ;; ++ SX-4:SUPER-UX:*:*) ++ echo sx4-nec-superux${UNAME_RELEASE} ++ exit ;; ++ SX-5:SUPER-UX:*:*) ++ echo sx5-nec-superux${UNAME_RELEASE} ++ exit ;; ++ SX-6:SUPER-UX:*:*) ++ echo sx6-nec-superux${UNAME_RELEASE} ++ exit ;; ++ SX-7:SUPER-UX:*:*) ++ echo sx7-nec-superux${UNAME_RELEASE} ++ exit ;; ++ SX-8:SUPER-UX:*:*) ++ echo sx8-nec-superux${UNAME_RELEASE} ++ exit ;; ++ SX-8R:SUPER-UX:*:*) ++ echo sx8r-nec-superux${UNAME_RELEASE} ++ exit ;; ++ SX-ACE:SUPER-UX:*:*) ++ echo sxace-nec-superux${UNAME_RELEASE} ++ exit ;; ++ Power*:Rhapsody:*:*) ++ echo powerpc-apple-rhapsody${UNAME_RELEASE} ++ exit ;; ++ *:Rhapsody:*:*) ++ echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} ++ exit ;; ++ *:Darwin:*:*) ++ UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown ++ eval $set_cc_for_build ++ if test "$UNAME_PROCESSOR" = unknown ; then ++ UNAME_PROCESSOR=powerpc ++ fi ++ if test `echo "$UNAME_RELEASE" | sed -e 's/\..*//'` -le 10 ; then ++ if [ "$CC_FOR_BUILD" != no_compiler_found ]; then ++ if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ ++ (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ ++ grep IS_64BIT_ARCH >/dev/null ++ then ++ case $UNAME_PROCESSOR in ++ i386) UNAME_PROCESSOR=x86_64 ;; ++ powerpc) UNAME_PROCESSOR=powerpc64 ;; ++ esac ++ fi ++ fi ++ elif test "$UNAME_PROCESSOR" = i386 ; then ++ # Avoid executing cc on OS X 10.9, as it ships with a stub ++ # that puts up a graphical alert prompting to install ++ # developer tools. Any system running Mac OS X 10.7 or ++ # later (Darwin 11 and later) is required to have a 64-bit ++ # processor. This is not true of the ARM version of Darwin ++ # that Apple uses in portable devices. 
++ UNAME_PROCESSOR=x86_64 ++ fi ++ echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} ++ exit ;; ++ *:procnto*:*:* | *:QNX:[0123456789]*:*) ++ UNAME_PROCESSOR=`uname -p` ++ if test "$UNAME_PROCESSOR" = x86; then ++ UNAME_PROCESSOR=i386 ++ UNAME_MACHINE=pc ++ fi ++ echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} ++ exit ;; ++ *:QNX:*:4*) ++ echo i386-pc-qnx ++ exit ;; ++ NEO-?:NONSTOP_KERNEL:*:*) ++ echo neo-tandem-nsk${UNAME_RELEASE} ++ exit ;; ++ NSE-*:NONSTOP_KERNEL:*:*) ++ echo nse-tandem-nsk${UNAME_RELEASE} ++ exit ;; ++ NSR-?:NONSTOP_KERNEL:*:*) ++ echo nsr-tandem-nsk${UNAME_RELEASE} ++ exit ;; ++ *:NonStop-UX:*:*) ++ echo mips-compaq-nonstopux ++ exit ;; ++ BS2000:POSIX*:*:*) ++ echo bs2000-siemens-sysv ++ exit ;; ++ DS/*:UNIX_System_V:*:*) ++ echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} ++ exit ;; ++ *:Plan9:*:*) ++ # "uname -m" is not consistent, so use $cputype instead. 386 ++ # is converted to i386 for consistency with other x86 ++ # operating systems. 
++ if test "$cputype" = 386; then ++ UNAME_MACHINE=i386 ++ else ++ UNAME_MACHINE="$cputype" ++ fi ++ echo ${UNAME_MACHINE}-unknown-plan9 ++ exit ;; ++ *:TOPS-10:*:*) ++ echo pdp10-unknown-tops10 ++ exit ;; ++ *:TENEX:*:*) ++ echo pdp10-unknown-tenex ++ exit ;; ++ KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) ++ echo pdp10-dec-tops20 ++ exit ;; ++ XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) ++ echo pdp10-xkl-tops20 ++ exit ;; ++ *:TOPS-20:*:*) ++ echo pdp10-unknown-tops20 ++ exit ;; ++ *:ITS:*:*) ++ echo pdp10-unknown-its ++ exit ;; ++ SEI:*:*:SEIUX) ++ echo mips-sei-seiux${UNAME_RELEASE} ++ exit ;; ++ *:DragonFly:*:*) ++ echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ++ exit ;; ++ *:*VMS:*:*) ++ UNAME_MACHINE=`(uname -p) 2>/dev/null` ++ case "${UNAME_MACHINE}" in ++ A*) echo alpha-dec-vms ; exit ;; ++ I*) echo ia64-dec-vms ; exit ;; ++ V*) echo vax-dec-vms ; exit ;; ++ esac ;; ++ *:XENIX:*:SysV) ++ echo i386-pc-xenix ++ exit ;; ++ i*86:skyos:*:*) ++ echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE} | sed -e 's/ .*$//'` ++ exit ;; ++ i*86:rdos:*:*) ++ echo ${UNAME_MACHINE}-pc-rdos ++ exit ;; ++ i*86:AROS:*:*) ++ echo ${UNAME_MACHINE}-pc-aros ++ exit ;; ++ x86_64:VMkernel:*:*) ++ echo ${UNAME_MACHINE}-unknown-esx ++ exit ;; ++ amd64:Isilon\ OneFS:*:*) ++ echo x86_64-unknown-onefs ++ exit ;; ++esac ++ ++cat >&2 </dev/null || echo unknown` ++uname -r = `(uname -r) 2>/dev/null || echo unknown` ++uname -s = `(uname -s) 2>/dev/null || echo unknown` ++uname -v = `(uname -v) 2>/dev/null || echo unknown` ++ ++/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` ++/bin/uname -X = `(/bin/uname -X) 2>/dev/null` ++ ++hostinfo = `(hostinfo) 2>/dev/null` ++/bin/universe = `(/bin/universe) 2>/dev/null` ++/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` ++/bin/arch = `(/bin/arch) 2>/dev/null` ++/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` ++/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` ++ ++UNAME_MACHINE = 
${UNAME_MACHINE} ++UNAME_RELEASE = ${UNAME_RELEASE} ++UNAME_SYSTEM = ${UNAME_SYSTEM} ++UNAME_VERSION = ${UNAME_VERSION} ++EOF ++ ++exit 1 ++ ++# Local variables: ++# eval: (add-hook 'write-file-hooks 'time-stamp) ++# time-stamp-start: "timestamp='" ++# time-stamp-format: "%:y-%02m-%02d" ++# time-stamp-end: "'" ++# End: diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/config.h.in index 000000000,000000000..87cb80598 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/config.h.in @@@ -1,0 -1,0 +1,134 @@@ ++/* config.h.in. Generated from configure.ac by autoheader. */ ++ ++/* ELF size: 32 or 64 */ ++#undef BACKTRACE_ELF_SIZE ++ ++/* Define to 1 if you have the __atomic functions */ ++#undef HAVE_ATOMIC_FUNCTIONS ++ ++/* Define to 1 if you have the declaration of `strnlen', and to 0 if you ++ don't. */ ++#undef HAVE_DECL_STRNLEN ++ ++/* Define to 1 if you have the header file. */ ++#undef HAVE_DLFCN_H ++ ++/* Define if dl_iterate_phdr is available. */ ++#undef HAVE_DL_ITERATE_PHDR ++ ++/* Define to 1 if you have the fcntl function */ ++#undef HAVE_FCNTL ++ ++/* Define if getexecname is available. */ ++#undef HAVE_GETEXECNAME ++ ++/* Define if _Unwind_GetIPInfo is available. */ ++#undef HAVE_GETIPINFO ++ ++/* Define to 1 if you have the header file. */ ++#undef HAVE_INTTYPES_H ++ ++/* Define to 1 if you have the header file. */ ++#undef HAVE_LINK_H ++ ++/* Define to 1 if you have the header file. */ ++#undef HAVE_MEMORY_H ++ ++/* Define to 1 if you have the header file. */ ++#undef HAVE_STDINT_H ++ ++/* Define to 1 if you have the header file. */ ++#undef HAVE_STDLIB_H ++ ++/* Define to 1 if you have the header file. */ ++#undef HAVE_STRINGS_H ++ ++/* Define to 1 if you have the header file. */ ++#undef HAVE_STRING_H ++ ++/* Define to 1 if you have the __sync functions */ ++#undef HAVE_SYNC_FUNCTIONS ++ ++/* Define to 1 if you have the header file. */ ++#undef HAVE_SYS_MMAN_H ++ ++/* Define to 1 if you have the header file. 
*/ ++#undef HAVE_SYS_STAT_H ++ ++/* Define to 1 if you have the header file. */ ++#undef HAVE_SYS_TYPES_H ++ ++/* Define to 1 if you have the header file. */ ++#undef HAVE_UNISTD_H ++ ++/* Define to the sub-directory in which libtool stores uninstalled libraries. ++ */ ++#undef LT_OBJDIR ++ ++/* Define to the address where bug reports for this package should be sent. */ ++#undef PACKAGE_BUGREPORT ++ ++/* Define to the full name of this package. */ ++#undef PACKAGE_NAME ++ ++/* Define to the full name and version of this package. */ ++#undef PACKAGE_STRING ++ ++/* Define to the one symbol short name of this package. */ ++#undef PACKAGE_TARNAME ++ ++/* Define to the home page for this package. */ ++#undef PACKAGE_URL ++ ++/* Define to the version of this package. */ ++#undef PACKAGE_VERSION ++ ++/* The size of `char', as computed by sizeof. */ ++#undef SIZEOF_CHAR ++ ++/* The size of `int', as computed by sizeof. */ ++#undef SIZEOF_INT ++ ++/* The size of `long', as computed by sizeof. */ ++#undef SIZEOF_LONG ++ ++/* The size of `short', as computed by sizeof. */ ++#undef SIZEOF_SHORT ++ ++/* The size of `void *', as computed by sizeof. */ ++#undef SIZEOF_VOID_P ++ ++/* Define to 1 if you have the ANSI C header files. */ ++#undef STDC_HEADERS ++ ++/* Enable extensions on AIX 3, Interix. */ ++#ifndef _ALL_SOURCE ++# undef _ALL_SOURCE ++#endif ++/* Enable GNU extensions on systems that have them. */ ++#ifndef _GNU_SOURCE ++# undef _GNU_SOURCE ++#endif ++/* Enable threading extensions on Solaris. */ ++#ifndef _POSIX_PTHREAD_SEMANTICS ++# undef _POSIX_PTHREAD_SEMANTICS ++#endif ++/* Enable extensions on HP NonStop. */ ++#ifndef _TANDEM_SOURCE ++# undef _TANDEM_SOURCE ++#endif ++/* Enable general extensions on Solaris. */ ++#ifndef __EXTENSIONS__ ++# undef __EXTENSIONS__ ++#endif ++ ++ ++/* Define to 1 if on MINIX. */ ++#undef _MINIX ++ ++/* Define to 2 if the system does not provide POSIX.1 features except with ++ this defined. 
*/ ++#undef _POSIX_1_SOURCE ++ ++/* Define to 1 if you need to in order for `stat' and other things to work. */ ++#undef _POSIX_SOURCE diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/config.sub index 000000000,000000000..3478c1fd0 new file mode 100755 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/config.sub @@@ -1,0 -1,0 +1,1825 @@@ ++#! /bin/sh ++# Configuration validation subroutine script. ++# Copyright 1992-2016 Free Software Foundation, Inc. ++ ++timestamp='2016-11-19' ++ ++# This file is free software; you can redistribute it and/or modify it ++# under the terms of the GNU General Public License as published by ++# the Free Software Foundation; either version 3 of the License, or ++# (at your option) any later version. ++# ++# This program is distributed in the hope that it will be useful, but ++# WITHOUT ANY WARRANTY; without even the implied warranty of ++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++# General Public License for more details. ++# ++# You should have received a copy of the GNU General Public License ++# along with this program; if not, see . ++# ++# As a special exception to the GNU General Public License, if you ++# distribute this file as part of a program that contains a ++# configuration script generated by Autoconf, you may include it under ++# the same distribution terms that you use for the rest of that ++# program. This Exception is an additional permission under section 7 ++# of the GNU General Public License, version 3 ("GPLv3"). ++ ++ ++# Please send patches to . ++# ++# Configuration subroutine to validate and canonicalize a configuration type. ++# Supply the specified configuration type as an argument. ++# If it is invalid, we print an error message on stderr and exit with code 1. ++# Otherwise, we print the canonical config type on stdout and succeed. 
++ ++# You can get the latest version of this script from: ++# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub ++ ++# This file is supposed to be the same for all GNU packages ++# and recognize all the CPU types, system types and aliases ++# that are meaningful with *any* GNU software. ++# Each package is responsible for reporting which valid configurations ++# it does not support. The user should be able to distinguish ++# a failure to support a valid configuration from a meaningless ++# configuration. ++ ++# The goal of this file is to map all the various variations of a given ++# machine specification into a single specification in the form: ++# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM ++# or in some cases, the newer four-part form: ++# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM ++# It is wrong to echo any other type of specification. ++ ++me=`echo "$0" | sed -e 's,.*/,,'` ++ ++usage="\ ++Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS ++ ++Canonicalize a configuration name. ++ ++Operation modes: ++ -h, --help print this help, then exit ++ -t, --time-stamp print date of last modification, then exit ++ -v, --version print version number, then exit ++ ++Report bugs and patches to ." ++ ++version="\ ++GNU config.sub ($timestamp) ++ ++Copyright 1992-2016 Free Software Foundation, Inc. ++ ++This is free software; see the source for copying conditions. There is NO ++warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." ++ ++help=" ++Try \`$me --help' for more information." ++ ++# Parse command line ++while test $# -gt 0 ; do ++ case $1 in ++ --time-stamp | --time* | -t ) ++ echo "$timestamp" ; exit ;; ++ --version | -v ) ++ echo "$version" ; exit ;; ++ --help | --h* | -h ) ++ echo "$usage"; exit ;; ++ -- ) # Stop option processing ++ shift; break ;; ++ - ) # Use stdin as input. ++ break ;; ++ -* ) ++ echo "$me: invalid option $1$help" ++ exit 1 ;; ++ ++ *local*) ++ # First pass through any local machine types. 
++ echo $1 ++ exit ;; ++ ++ * ) ++ break ;; ++ esac ++done ++ ++case $# in ++ 0) echo "$me: missing argument$help" >&2 ++ exit 1;; ++ 1) ;; ++ *) echo "$me: too many arguments$help" >&2 ++ exit 1;; ++esac ++ ++# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). ++# Here we must recognize all the valid KERNEL-OS combinations. ++maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` ++case $maybe_os in ++ nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ ++ linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ ++ knetbsd*-gnu* | netbsd*-gnu* | netbsd*-eabi* | \ ++ kopensolaris*-gnu* | cloudabi*-eabi* | \ ++ storm-chaos* | os2-emx* | rtmk-nova*) ++ os=-$maybe_os ++ basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` ++ ;; ++ android-linux) ++ os=-linux-android ++ basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown ++ ;; ++ *) ++ basic_machine=`echo $1 | sed 's/-[^-]*$//'` ++ if [ $basic_machine != $1 ] ++ then os=`echo $1 | sed 's/.*-/-/'` ++ else os=; fi ++ ;; ++esac ++ ++### Let's recognize common machines as not being operating systems so ++### that things like config.sub decstation-3100 work. We also ++### recognize some manufacturers as not being operating systems, so we ++### can provide default operating systems below. ++case $os in ++ -sun*os*) ++ # Prevent following clause from handling this invalid input. 
++ ;; ++ -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ ++ -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ ++ -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ ++ -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ ++ -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ ++ -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ ++ -apple | -axis | -knuth | -cray | -microblaze*) ++ os= ++ basic_machine=$1 ++ ;; ++ -bluegene*) ++ os=-cnk ++ ;; ++ -sim | -cisco | -oki | -wec | -winbond) ++ os= ++ basic_machine=$1 ++ ;; ++ -scout) ++ ;; ++ -wrs) ++ os=-vxworks ++ basic_machine=$1 ++ ;; ++ -chorusos*) ++ os=-chorusos ++ basic_machine=$1 ++ ;; ++ -chorusrdb) ++ os=-chorusrdb ++ basic_machine=$1 ++ ;; ++ -hiux*) ++ os=-hiuxwe2 ++ ;; ++ -sco6) ++ os=-sco5v6 ++ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ++ ;; ++ -sco5) ++ os=-sco3.2v5 ++ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ++ ;; ++ -sco4) ++ os=-sco3.2v4 ++ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ++ ;; ++ -sco3.2.[4-9]*) ++ os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` ++ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ++ ;; ++ -sco3.2v[4-9]*) ++ # Don't forget version if it is 3.2v4 or newer. ++ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ++ ;; ++ -sco5v6*) ++ # Don't forget version if it is 3.2v4 or newer. 
++ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ++ ;; ++ -sco*) ++ os=-sco3.2v2 ++ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ++ ;; ++ -udk*) ++ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ++ ;; ++ -isc) ++ os=-isc2.2 ++ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ++ ;; ++ -clix*) ++ basic_machine=clipper-intergraph ++ ;; ++ -isc*) ++ basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ++ ;; ++ -lynx*178) ++ os=-lynxos178 ++ ;; ++ -lynx*5) ++ os=-lynxos5 ++ ;; ++ -lynx*) ++ os=-lynxos ++ ;; ++ -ptx*) ++ basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` ++ ;; ++ -windowsnt*) ++ os=`echo $os | sed -e 's/windowsnt/winnt/'` ++ ;; ++ -psos*) ++ os=-psos ++ ;; ++ -mint | -mint[0-9]*) ++ basic_machine=m68k-atari ++ os=-mint ++ ;; ++esac ++ ++# Decode aliases for certain CPU-COMPANY combinations. ++case $basic_machine in ++ # Recognize the basic CPU types without company name. ++ # Some are omitted here because they have special meanings below. ++ 1750a | 580 \ ++ | a29k \ ++ | aarch64 | aarch64_be \ ++ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ ++ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ ++ | am33_2.0 \ ++ | arc | arceb \ ++ | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv7[arm] \ ++ | avr | avr32 \ ++ | ba \ ++ | be32 | be64 \ ++ | bfin \ ++ | c4x | c8051 | clipper \ ++ | d10v | d30v | dlx | dsp16xx \ ++ | e2k | epiphany \ ++ | fido | fr30 | frv | ft32 \ ++ | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ ++ | hexagon \ ++ | i370 | i860 | i960 | ia64 \ ++ | ip2k | iq2000 \ ++ | k1om \ ++ | le32 | le64 \ ++ | lm32 \ ++ | m32c | m32r | m32rle | m68000 | m68k | m88k \ ++ | maxq | mb | microblaze | microblazeel | mcore | mep | metag \ ++ | mips | mipsbe | mipseb | mipsel | mipsle \ ++ | mips16 \ ++ | mips64 | mips64el \ ++ | mips64octeon | mips64octeonel \ ++ | mips64orion | mips64orionel \ ++ | mips64r5900 | mips64r5900el \ ++ | mips64vr | mips64vrel 
\ ++ | mips64vr4100 | mips64vr4100el \ ++ | mips64vr4300 | mips64vr4300el \ ++ | mips64vr5000 | mips64vr5000el \ ++ | mips64vr5900 | mips64vr5900el \ ++ | mipsisa32 | mipsisa32el \ ++ | mipsisa32r2 | mipsisa32r2el \ ++ | mipsisa32r6 | mipsisa32r6el \ ++ | mipsisa64 | mipsisa64el \ ++ | mipsisa64r2 | mipsisa64r2el \ ++ | mipsisa64r6 | mipsisa64r6el \ ++ | mipsisa64sb1 | mipsisa64sb1el \ ++ | mipsisa64sr71k | mipsisa64sr71kel \ ++ | mipsr5900 | mipsr5900el \ ++ | mipstx39 | mipstx39el \ ++ | mn10200 | mn10300 \ ++ | moxie \ ++ | mt \ ++ | msp430 \ ++ | nds32 | nds32le | nds32be \ ++ | nios | nios2 | nios2eb | nios2el \ ++ | ns16k | ns32k \ ++ | open8 | or1k | or1knd | or32 \ ++ | pdp10 | pdp11 | pj | pjl \ ++ | powerpc | powerpc64 | powerpc64le | powerpcle \ ++ | pru \ ++ | pyramid \ ++ | riscv32 | riscv64 \ ++ | rl78 | rx \ ++ | score \ ++ | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[234]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ ++ | sh64 | sh64le \ ++ | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \ ++ | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ ++ | spu \ ++ | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \ ++ | ubicom32 \ ++ | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \ ++ | visium \ ++ | we32k \ ++ | x86 | xc16x | xstormy16 | xtensa \ ++ | z8k | z80) ++ basic_machine=$basic_machine-unknown ++ ;; ++ c54x) ++ basic_machine=tic54x-unknown ++ ;; ++ c55x) ++ basic_machine=tic55x-unknown ++ ;; ++ c6x) ++ basic_machine=tic6x-unknown ++ ;; ++ leon|leon[3-9]) ++ basic_machine=sparc-$basic_machine ++ ;; ++ m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip) ++ basic_machine=$basic_machine-unknown ++ os=-none ++ ;; ++ m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) ++ ;; ++ ms1) ++ basic_machine=mt-unknown ++ ;; ++ ++ strongarm | thumb | xscale) ++ basic_machine=arm-unknown ++ ;; ++ xgate) ++ basic_machine=$basic_machine-unknown ++ os=-none ++ ;; ++ xscaleeb) ++ 
basic_machine=armeb-unknown ++ ;; ++ ++ xscaleel) ++ basic_machine=armel-unknown ++ ;; ++ ++ # We use `pc' rather than `unknown' ++ # because (1) that's what they normally are, and ++ # (2) the word "unknown" tends to confuse beginning users. ++ i*86 | x86_64) ++ basic_machine=$basic_machine-pc ++ ;; ++ # Object if more than one company name word. ++ *-*-*) ++ echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 ++ exit 1 ++ ;; ++ # Recognize the basic CPU types with company name. ++ 580-* \ ++ | a29k-* \ ++ | aarch64-* | aarch64_be-* \ ++ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ ++ | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ ++ | alphapca5[67]-* | alpha64pca5[67]-* | arc-* | arceb-* \ ++ | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ ++ | avr-* | avr32-* \ ++ | ba-* \ ++ | be32-* | be64-* \ ++ | bfin-* | bs2000-* \ ++ | c[123]* | c30-* | [cjt]90-* | c4x-* \ ++ | c8051-* | clipper-* | craynv-* | cydra-* \ ++ | d10v-* | d30v-* | dlx-* \ ++ | e2k-* | elxsi-* \ ++ | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ ++ | h8300-* | h8500-* \ ++ | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ ++ | hexagon-* \ ++ | i*86-* | i860-* | i960-* | ia64-* \ ++ | ip2k-* | iq2000-* \ ++ | k1om-* \ ++ | le32-* | le64-* \ ++ | lm32-* \ ++ | m32c-* | m32r-* | m32rle-* \ ++ | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ ++ | m88110-* | m88k-* | maxq-* | mcore-* | metag-* \ ++ | microblaze-* | microblazeel-* \ ++ | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ ++ | mips16-* \ ++ | mips64-* | mips64el-* \ ++ | mips64octeon-* | mips64octeonel-* \ ++ | mips64orion-* | mips64orionel-* \ ++ | mips64r5900-* | mips64r5900el-* \ ++ | mips64vr-* | mips64vrel-* \ ++ | mips64vr4100-* | mips64vr4100el-* \ ++ | mips64vr4300-* | mips64vr4300el-* \ ++ | mips64vr5000-* | mips64vr5000el-* \ ++ | mips64vr5900-* | mips64vr5900el-* \ ++ | mipsisa32-* | mipsisa32el-* \ ++ | mipsisa32r2-* 
| mipsisa32r2el-* \ ++ | mipsisa32r6-* | mipsisa32r6el-* \ ++ | mipsisa64-* | mipsisa64el-* \ ++ | mipsisa64r2-* | mipsisa64r2el-* \ ++ | mipsisa64r6-* | mipsisa64r6el-* \ ++ | mipsisa64sb1-* | mipsisa64sb1el-* \ ++ | mipsisa64sr71k-* | mipsisa64sr71kel-* \ ++ | mipsr5900-* | mipsr5900el-* \ ++ | mipstx39-* | mipstx39el-* \ ++ | mmix-* \ ++ | mt-* \ ++ | msp430-* \ ++ | nds32-* | nds32le-* | nds32be-* \ ++ | nios-* | nios2-* | nios2eb-* | nios2el-* \ ++ | none-* | np1-* | ns16k-* | ns32k-* \ ++ | open8-* \ ++ | or1k*-* \ ++ | orion-* \ ++ | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ ++ | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ ++ | pru-* \ ++ | pyramid-* \ ++ | riscv32-* | riscv64-* \ ++ | rl78-* | romp-* | rs6000-* | rx-* \ ++ | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ ++ | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ ++ | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ ++ | sparclite-* \ ++ | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx*-* \ ++ | tahoe-* \ ++ | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ ++ | tile*-* \ ++ | tron-* \ ++ | ubicom32-* \ ++ | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ ++ | vax-* \ ++ | visium-* \ ++ | we32k-* \ ++ | x86-* | x86_64-* | xc16x-* | xps100-* \ ++ | xstormy16-* | xtensa*-* \ ++ | ymp-* \ ++ | z8k-* | z80-*) ++ ;; ++ # Recognize the basic CPU types without company name, with glob match. ++ xtensa*) ++ basic_machine=$basic_machine-unknown ++ ;; ++ # Recognize the various machine names and aliases which stand ++ # for a CPU type and a company and sometimes even an OS. 
++ 386bsd) ++ basic_machine=i386-unknown ++ os=-bsd ++ ;; ++ 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) ++ basic_machine=m68000-att ++ ;; ++ 3b*) ++ basic_machine=we32k-att ++ ;; ++ a29khif) ++ basic_machine=a29k-amd ++ os=-udi ++ ;; ++ abacus) ++ basic_machine=abacus-unknown ++ ;; ++ adobe68k) ++ basic_machine=m68010-adobe ++ os=-scout ++ ;; ++ alliant | fx80) ++ basic_machine=fx80-alliant ++ ;; ++ altos | altos3068) ++ basic_machine=m68k-altos ++ ;; ++ am29k) ++ basic_machine=a29k-none ++ os=-bsd ++ ;; ++ amd64) ++ basic_machine=x86_64-pc ++ ;; ++ amd64-*) ++ basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ amdahl) ++ basic_machine=580-amdahl ++ os=-sysv ++ ;; ++ amiga | amiga-*) ++ basic_machine=m68k-unknown ++ ;; ++ amigaos | amigados) ++ basic_machine=m68k-unknown ++ os=-amigaos ++ ;; ++ amigaunix | amix) ++ basic_machine=m68k-unknown ++ os=-sysv4 ++ ;; ++ apollo68) ++ basic_machine=m68k-apollo ++ os=-sysv ++ ;; ++ apollo68bsd) ++ basic_machine=m68k-apollo ++ os=-bsd ++ ;; ++ aros) ++ basic_machine=i386-pc ++ os=-aros ++ ;; ++ asmjs) ++ basic_machine=asmjs-unknown ++ ;; ++ aux) ++ basic_machine=m68k-apple ++ os=-aux ++ ;; ++ balance) ++ basic_machine=ns32k-sequent ++ os=-dynix ++ ;; ++ blackfin) ++ basic_machine=bfin-unknown ++ os=-linux ++ ;; ++ blackfin-*) ++ basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'` ++ os=-linux ++ ;; ++ bluegene*) ++ basic_machine=powerpc-ibm ++ os=-cnk ++ ;; ++ c54x-*) ++ basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ c55x-*) ++ basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ c6x-*) ++ basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ c90) ++ basic_machine=c90-cray ++ os=-unicos ++ ;; ++ cegcc) ++ basic_machine=arm-unknown ++ os=-cegcc ++ ;; ++ convex-c1) ++ basic_machine=c1-convex ++ os=-bsd ++ ;; ++ convex-c2) ++ basic_machine=c2-convex ++ os=-bsd ++ ;; ++ convex-c32) ++ basic_machine=c32-convex ++ os=-bsd ++ 
;; ++ convex-c34) ++ basic_machine=c34-convex ++ os=-bsd ++ ;; ++ convex-c38) ++ basic_machine=c38-convex ++ os=-bsd ++ ;; ++ cray | j90) ++ basic_machine=j90-cray ++ os=-unicos ++ ;; ++ craynv) ++ basic_machine=craynv-cray ++ os=-unicosmp ++ ;; ++ cr16 | cr16-*) ++ basic_machine=cr16-unknown ++ os=-elf ++ ;; ++ crds | unos) ++ basic_machine=m68k-crds ++ ;; ++ crisv32 | crisv32-* | etraxfs*) ++ basic_machine=crisv32-axis ++ ;; ++ cris | cris-* | etrax*) ++ basic_machine=cris-axis ++ ;; ++ crx) ++ basic_machine=crx-unknown ++ os=-elf ++ ;; ++ da30 | da30-*) ++ basic_machine=m68k-da30 ++ ;; ++ decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) ++ basic_machine=mips-dec ++ ;; ++ decsystem10* | dec10*) ++ basic_machine=pdp10-dec ++ os=-tops10 ++ ;; ++ decsystem20* | dec20*) ++ basic_machine=pdp10-dec ++ os=-tops20 ++ ;; ++ delta | 3300 | motorola-3300 | motorola-delta \ ++ | 3300-motorola | delta-motorola) ++ basic_machine=m68k-motorola ++ ;; ++ delta88) ++ basic_machine=m88k-motorola ++ os=-sysv3 ++ ;; ++ dicos) ++ basic_machine=i686-pc ++ os=-dicos ++ ;; ++ djgpp) ++ basic_machine=i586-pc ++ os=-msdosdjgpp ++ ;; ++ dpx20 | dpx20-*) ++ basic_machine=rs6000-bull ++ os=-bosx ++ ;; ++ dpx2* | dpx2*-bull) ++ basic_machine=m68k-bull ++ os=-sysv3 ++ ;; ++ e500v[12]) ++ basic_machine=powerpc-unknown ++ os=$os"spe" ++ ;; ++ e500v[12]-*) ++ basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` ++ os=$os"spe" ++ ;; ++ ebmon29k) ++ basic_machine=a29k-amd ++ os=-ebmon ++ ;; ++ elxsi) ++ basic_machine=elxsi-elxsi ++ os=-bsd ++ ;; ++ encore | umax | mmax) ++ basic_machine=ns32k-encore ++ ;; ++ es1800 | OSE68k | ose68k | ose | OSE) ++ basic_machine=m68k-ericsson ++ os=-ose ++ ;; ++ fx2800) ++ basic_machine=i860-alliant ++ ;; ++ genix) ++ basic_machine=ns32k-ns ++ ;; ++ gmicro) ++ basic_machine=tron-gmicro ++ os=-sysv ++ ;; ++ go32) ++ basic_machine=i386-pc ++ os=-go32 ++ ;; ++ h3050r* | hiux*) ++ basic_machine=hppa1.1-hitachi ++ os=-hiuxwe2 ++ ;; ++ 
h8300hms) ++ basic_machine=h8300-hitachi ++ os=-hms ++ ;; ++ h8300xray) ++ basic_machine=h8300-hitachi ++ os=-xray ++ ;; ++ h8500hms) ++ basic_machine=h8500-hitachi ++ os=-hms ++ ;; ++ harris) ++ basic_machine=m88k-harris ++ os=-sysv3 ++ ;; ++ hp300-*) ++ basic_machine=m68k-hp ++ ;; ++ hp300bsd) ++ basic_machine=m68k-hp ++ os=-bsd ++ ;; ++ hp300hpux) ++ basic_machine=m68k-hp ++ os=-hpux ++ ;; ++ hp3k9[0-9][0-9] | hp9[0-9][0-9]) ++ basic_machine=hppa1.0-hp ++ ;; ++ hp9k2[0-9][0-9] | hp9k31[0-9]) ++ basic_machine=m68000-hp ++ ;; ++ hp9k3[2-9][0-9]) ++ basic_machine=m68k-hp ++ ;; ++ hp9k6[0-9][0-9] | hp6[0-9][0-9]) ++ basic_machine=hppa1.0-hp ++ ;; ++ hp9k7[0-79][0-9] | hp7[0-79][0-9]) ++ basic_machine=hppa1.1-hp ++ ;; ++ hp9k78[0-9] | hp78[0-9]) ++ # FIXME: really hppa2.0-hp ++ basic_machine=hppa1.1-hp ++ ;; ++ hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) ++ # FIXME: really hppa2.0-hp ++ basic_machine=hppa1.1-hp ++ ;; ++ hp9k8[0-9][13679] | hp8[0-9][13679]) ++ basic_machine=hppa1.1-hp ++ ;; ++ hp9k8[0-9][0-9] | hp8[0-9][0-9]) ++ basic_machine=hppa1.0-hp ++ ;; ++ hppa-next) ++ os=-nextstep3 ++ ;; ++ hppaosf) ++ basic_machine=hppa1.1-hp ++ os=-osf ++ ;; ++ hppro) ++ basic_machine=hppa1.1-hp ++ os=-proelf ++ ;; ++ i370-ibm* | ibm*) ++ basic_machine=i370-ibm ++ ;; ++ i*86v32) ++ basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` ++ os=-sysv32 ++ ;; ++ i*86v4*) ++ basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` ++ os=-sysv4 ++ ;; ++ i*86v) ++ basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` ++ os=-sysv ++ ;; ++ i*86sol2) ++ basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` ++ os=-solaris2 ++ ;; ++ i386mach) ++ basic_machine=i386-mach ++ os=-mach ++ ;; ++ i386-vsta | vsta) ++ basic_machine=i386-unknown ++ os=-vsta ++ ;; ++ iris | iris4d) ++ basic_machine=mips-sgi ++ case $os in ++ -irix*) ++ ;; ++ *) ++ os=-irix4 ++ ;; ++ esac ++ ;; ++ isi68 | isi) ++ basic_machine=m68k-isi ++ os=-sysv ++ ;; ++ leon-*|leon[3-9]-*) ++ 
basic_machine=sparc-`echo $basic_machine | sed 's/-.*//'` ++ ;; ++ m68knommu) ++ basic_machine=m68k-unknown ++ os=-linux ++ ;; ++ m68knommu-*) ++ basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'` ++ os=-linux ++ ;; ++ m88k-omron*) ++ basic_machine=m88k-omron ++ ;; ++ magnum | m3230) ++ basic_machine=mips-mips ++ os=-sysv ++ ;; ++ merlin) ++ basic_machine=ns32k-utek ++ os=-sysv ++ ;; ++ microblaze*) ++ basic_machine=microblaze-xilinx ++ ;; ++ mingw64) ++ basic_machine=x86_64-pc ++ os=-mingw64 ++ ;; ++ mingw32) ++ basic_machine=i686-pc ++ os=-mingw32 ++ ;; ++ mingw32ce) ++ basic_machine=arm-unknown ++ os=-mingw32ce ++ ;; ++ miniframe) ++ basic_machine=m68000-convergent ++ ;; ++ *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) ++ basic_machine=m68k-atari ++ os=-mint ++ ;; ++ mips3*-*) ++ basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` ++ ;; ++ mips3*) ++ basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown ++ ;; ++ monitor) ++ basic_machine=m68k-rom68k ++ os=-coff ++ ;; ++ morphos) ++ basic_machine=powerpc-unknown ++ os=-morphos ++ ;; ++ moxiebox) ++ basic_machine=moxie-unknown ++ os=-moxiebox ++ ;; ++ msdos) ++ basic_machine=i386-pc ++ os=-msdos ++ ;; ++ ms1-*) ++ basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` ++ ;; ++ msys) ++ basic_machine=i686-pc ++ os=-msys ++ ;; ++ mvs) ++ basic_machine=i370-ibm ++ os=-mvs ++ ;; ++ nacl) ++ basic_machine=le32-unknown ++ os=-nacl ++ ;; ++ ncr3000) ++ basic_machine=i486-ncr ++ os=-sysv4 ++ ;; ++ netbsd386) ++ basic_machine=i386-unknown ++ os=-netbsd ++ ;; ++ netwinder) ++ basic_machine=armv4l-rebel ++ os=-linux ++ ;; ++ news | news700 | news800 | news900) ++ basic_machine=m68k-sony ++ os=-newsos ++ ;; ++ news1000) ++ basic_machine=m68030-sony ++ os=-newsos ++ ;; ++ news-3600 | risc-news) ++ basic_machine=mips-sony ++ os=-newsos ++ ;; ++ necv70) ++ basic_machine=v70-nec ++ os=-sysv ++ ;; ++ next | m*-next ) ++ basic_machine=m68k-next ++ case $os in ++ -nextstep* ) ++ ;; ++ -ns2*) ++ 
os=-nextstep2 ++ ;; ++ *) ++ os=-nextstep3 ++ ;; ++ esac ++ ;; ++ nh3000) ++ basic_machine=m68k-harris ++ os=-cxux ++ ;; ++ nh[45]000) ++ basic_machine=m88k-harris ++ os=-cxux ++ ;; ++ nindy960) ++ basic_machine=i960-intel ++ os=-nindy ++ ;; ++ mon960) ++ basic_machine=i960-intel ++ os=-mon960 ++ ;; ++ nonstopux) ++ basic_machine=mips-compaq ++ os=-nonstopux ++ ;; ++ np1) ++ basic_machine=np1-gould ++ ;; ++ neo-tandem) ++ basic_machine=neo-tandem ++ ;; ++ nse-tandem) ++ basic_machine=nse-tandem ++ ;; ++ nsr-tandem) ++ basic_machine=nsr-tandem ++ ;; ++ op50n-* | op60c-*) ++ basic_machine=hppa1.1-oki ++ os=-proelf ++ ;; ++ openrisc | openrisc-*) ++ basic_machine=or32-unknown ++ ;; ++ os400) ++ basic_machine=powerpc-ibm ++ os=-os400 ++ ;; ++ OSE68000 | ose68000) ++ basic_machine=m68000-ericsson ++ os=-ose ++ ;; ++ os68k) ++ basic_machine=m68k-none ++ os=-os68k ++ ;; ++ pa-hitachi) ++ basic_machine=hppa1.1-hitachi ++ os=-hiuxwe2 ++ ;; ++ paragon) ++ basic_machine=i860-intel ++ os=-osf ++ ;; ++ parisc) ++ basic_machine=hppa-unknown ++ os=-linux ++ ;; ++ parisc-*) ++ basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'` ++ os=-linux ++ ;; ++ pbd) ++ basic_machine=sparc-tti ++ ;; ++ pbb) ++ basic_machine=m68k-tti ++ ;; ++ pc532 | pc532-*) ++ basic_machine=ns32k-pc532 ++ ;; ++ pc98) ++ basic_machine=i386-pc ++ ;; ++ pc98-*) ++ basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ pentium | p5 | k5 | k6 | nexgen | viac3) ++ basic_machine=i586-pc ++ ;; ++ pentiumpro | p6 | 6x86 | athlon | athlon_*) ++ basic_machine=i686-pc ++ ;; ++ pentiumii | pentium2 | pentiumiii | pentium3) ++ basic_machine=i686-pc ++ ;; ++ pentium4) ++ basic_machine=i786-pc ++ ;; ++ pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) ++ basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ pentiumpro-* | p6-* | 6x86-* | athlon-*) ++ basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) ++ 
basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ pentium4-*) ++ basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ pn) ++ basic_machine=pn-gould ++ ;; ++ power) basic_machine=power-ibm ++ ;; ++ ppc | ppcbe) basic_machine=powerpc-unknown ++ ;; ++ ppc-* | ppcbe-*) ++ basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ ppcle | powerpclittle) ++ basic_machine=powerpcle-unknown ++ ;; ++ ppcle-* | powerpclittle-*) ++ basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ ppc64) basic_machine=powerpc64-unknown ++ ;; ++ ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ ppc64le | powerpc64little) ++ basic_machine=powerpc64le-unknown ++ ;; ++ ppc64le-* | powerpc64little-*) ++ basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ ps2) ++ basic_machine=i386-ibm ++ ;; ++ pw32) ++ basic_machine=i586-unknown ++ os=-pw32 ++ ;; ++ rdos | rdos64) ++ basic_machine=x86_64-pc ++ os=-rdos ++ ;; ++ rdos32) ++ basic_machine=i386-pc ++ os=-rdos ++ ;; ++ rom68k) ++ basic_machine=m68k-rom68k ++ os=-coff ++ ;; ++ rm[46]00) ++ basic_machine=mips-siemens ++ ;; ++ rtpc | rtpc-*) ++ basic_machine=romp-ibm ++ ;; ++ s390 | s390-*) ++ basic_machine=s390-ibm ++ ;; ++ s390x | s390x-*) ++ basic_machine=s390x-ibm ++ ;; ++ sa29200) ++ basic_machine=a29k-amd ++ os=-udi ++ ;; ++ sb1) ++ basic_machine=mipsisa64sb1-unknown ++ ;; ++ sb1el) ++ basic_machine=mipsisa64sb1el-unknown ++ ;; ++ sde) ++ basic_machine=mipsisa32-sde ++ os=-elf ++ ;; ++ sei) ++ basic_machine=mips-sei ++ os=-seiux ++ ;; ++ sequent) ++ basic_machine=i386-sequent ++ ;; ++ sh) ++ basic_machine=sh-hitachi ++ os=-hms ++ ;; ++ sh5el) ++ basic_machine=sh5le-unknown ++ ;; ++ sh64) ++ basic_machine=sh64-unknown ++ ;; ++ sparclite-wrs | simso-wrs) ++ basic_machine=sparclite-wrs ++ os=-vxworks ++ ;; ++ sps7) ++ basic_machine=m68k-bull ++ os=-sysv2 ++ ;; ++ spur) ++ basic_machine=spur-unknown ++ ;; ++ st2000) ++ 
basic_machine=m68k-tandem ++ ;; ++ stratus) ++ basic_machine=i860-stratus ++ os=-sysv4 ++ ;; ++ strongarm-* | thumb-*) ++ basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ;; ++ sun2) ++ basic_machine=m68000-sun ++ ;; ++ sun2os3) ++ basic_machine=m68000-sun ++ os=-sunos3 ++ ;; ++ sun2os4) ++ basic_machine=m68000-sun ++ os=-sunos4 ++ ;; ++ sun3os3) ++ basic_machine=m68k-sun ++ os=-sunos3 ++ ;; ++ sun3os4) ++ basic_machine=m68k-sun ++ os=-sunos4 ++ ;; ++ sun4os3) ++ basic_machine=sparc-sun ++ os=-sunos3 ++ ;; ++ sun4os4) ++ basic_machine=sparc-sun ++ os=-sunos4 ++ ;; ++ sun4sol2) ++ basic_machine=sparc-sun ++ os=-solaris2 ++ ;; ++ sun3 | sun3-*) ++ basic_machine=m68k-sun ++ ;; ++ sun4) ++ basic_machine=sparc-sun ++ ;; ++ sun386 | sun386i | roadrunner) ++ basic_machine=i386-sun ++ ;; ++ sv1) ++ basic_machine=sv1-cray ++ os=-unicos ++ ;; ++ symmetry) ++ basic_machine=i386-sequent ++ os=-dynix ++ ;; ++ t3e) ++ basic_machine=alphaev5-cray ++ os=-unicos ++ ;; ++ t90) ++ basic_machine=t90-cray ++ os=-unicos ++ ;; ++ tile*) ++ basic_machine=$basic_machine-unknown ++ os=-linux-gnu ++ ;; ++ tx39) ++ basic_machine=mipstx39-unknown ++ ;; ++ tx39el) ++ basic_machine=mipstx39el-unknown ++ ;; ++ toad1) ++ basic_machine=pdp10-xkl ++ os=-tops20 ++ ;; ++ tower | tower-32) ++ basic_machine=m68k-ncr ++ ;; ++ tpf) ++ basic_machine=s390x-ibm ++ os=-tpf ++ ;; ++ udi29k) ++ basic_machine=a29k-amd ++ os=-udi ++ ;; ++ ultra3) ++ basic_machine=a29k-nyu ++ os=-sym1 ++ ;; ++ v810 | necv810) ++ basic_machine=v810-nec ++ os=-none ++ ;; ++ vaxv) ++ basic_machine=vax-dec ++ os=-sysv ++ ;; ++ vms) ++ basic_machine=vax-dec ++ os=-vms ++ ;; ++ vpp*|vx|vx-*) ++ basic_machine=f301-fujitsu ++ ;; ++ vxworks960) ++ basic_machine=i960-wrs ++ os=-vxworks ++ ;; ++ vxworks68) ++ basic_machine=m68k-wrs ++ os=-vxworks ++ ;; ++ vxworks29k) ++ basic_machine=a29k-wrs ++ os=-vxworks ++ ;; ++ w65*) ++ basic_machine=w65-wdc ++ os=-none ++ ;; ++ w89k-*) ++ basic_machine=hppa1.1-winbond ++ os=-proelf ++ ;; 
++ xbox) ++ basic_machine=i686-pc ++ os=-mingw32 ++ ;; ++ xps | xps100) ++ basic_machine=xps100-honeywell ++ ;; ++ xscale-* | xscalee[bl]-*) ++ basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'` ++ ;; ++ ymp) ++ basic_machine=ymp-cray ++ os=-unicos ++ ;; ++ z8k-*-coff) ++ basic_machine=z8k-unknown ++ os=-sim ++ ;; ++ z80-*-coff) ++ basic_machine=z80-unknown ++ os=-sim ++ ;; ++ none) ++ basic_machine=none-none ++ os=-none ++ ;; ++ ++# Here we handle the default manufacturer of certain CPU types. It is in ++# some cases the only manufacturer, in others, it is the most popular. ++ w89k) ++ basic_machine=hppa1.1-winbond ++ ;; ++ op50n) ++ basic_machine=hppa1.1-oki ++ ;; ++ op60c) ++ basic_machine=hppa1.1-oki ++ ;; ++ romp) ++ basic_machine=romp-ibm ++ ;; ++ mmix) ++ basic_machine=mmix-knuth ++ ;; ++ rs6000) ++ basic_machine=rs6000-ibm ++ ;; ++ vax) ++ basic_machine=vax-dec ++ ;; ++ pdp10) ++ # there are many clones, so DEC is not a safe bet ++ basic_machine=pdp10-unknown ++ ;; ++ pdp11) ++ basic_machine=pdp11-dec ++ ;; ++ we32k) ++ basic_machine=we32k-att ++ ;; ++ sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele) ++ basic_machine=sh-unknown ++ ;; ++ sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v) ++ basic_machine=sparc-sun ++ ;; ++ cydra) ++ basic_machine=cydra-cydrome ++ ;; ++ orion) ++ basic_machine=orion-highlevel ++ ;; ++ orion105) ++ basic_machine=clipper-highlevel ++ ;; ++ mac | mpw | mac-mpw) ++ basic_machine=m68k-apple ++ ;; ++ pmac | pmac-mpw) ++ basic_machine=powerpc-apple ++ ;; ++ *-unknown) ++ # Make sure to match an already-canonicalized machine name. ++ ;; ++ *) ++ echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 ++ exit 1 ++ ;; ++esac ++ ++# Here we canonicalize certain aliases for manufacturers. 
++case $basic_machine in ++ *-digital*) ++ basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` ++ ;; ++ *-commodore*) ++ basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` ++ ;; ++ *) ++ ;; ++esac ++ ++# Decode manufacturer-specific aliases for certain operating systems. ++ ++if [ x"$os" != x"" ] ++then ++case $os in ++ # First match some system type aliases ++ # that might get confused with valid system types. ++ # -solaris* is a basic system type, with this one exception. ++ -auroraux) ++ os=-auroraux ++ ;; ++ -solaris1 | -solaris1.*) ++ os=`echo $os | sed -e 's|solaris1|sunos4|'` ++ ;; ++ -solaris) ++ os=-solaris2 ++ ;; ++ -svr4*) ++ os=-sysv4 ++ ;; ++ -unixware*) ++ os=-sysv4.2uw ++ ;; ++ -gnu/linux*) ++ os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` ++ ;; ++ # First accept the basic system types. ++ # The portable systems comes first. ++ # Each alternative MUST END IN A *, to match a version number. ++ # -sysv* is not here because it comes later, after sysvr4. 
++ -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ ++ | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ ++ | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ ++ | -sym* | -kopensolaris* | -plan9* \ ++ | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ ++ | -aos* | -aros* | -cloudabi* | -sortix* \ ++ | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ ++ | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ ++ | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ ++ | -bitrig* | -openbsd* | -solidbsd* | -libertybsd* \ ++ | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ ++ | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ ++ | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ ++ | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ ++ | -chorusos* | -chorusrdb* | -cegcc* | -glidix* \ ++ | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ ++ | -midipix* | -mingw32* | -mingw64* | -linux-gnu* | -linux-android* \ ++ | -linux-newlib* | -linux-musl* | -linux-uclibc* \ ++ | -uxpv* | -beos* | -mpeix* | -udk* | -moxiebox* \ ++ | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ ++ | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ ++ | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ ++ | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ ++ | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ ++ | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ ++ | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es* \ ++ | -onefs* | -tirtos* | -phoenix* | -fuchsia*) ++ # Remember, each alternative MUST END IN *, to match a version number. 
++ ;; ++ -qnx*) ++ case $basic_machine in ++ x86-* | i*86-*) ++ ;; ++ *) ++ os=-nto$os ++ ;; ++ esac ++ ;; ++ -nto-qnx*) ++ ;; ++ -nto*) ++ os=`echo $os | sed -e 's|nto|nto-qnx|'` ++ ;; ++ -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ ++ | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \ ++ | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) ++ ;; ++ -mac*) ++ os=`echo $os | sed -e 's|mac|macos|'` ++ ;; ++ -linux-dietlibc) ++ os=-linux-dietlibc ++ ;; ++ -linux*) ++ os=`echo $os | sed -e 's|linux|linux-gnu|'` ++ ;; ++ -sunos5*) ++ os=`echo $os | sed -e 's|sunos5|solaris2|'` ++ ;; ++ -sunos6*) ++ os=`echo $os | sed -e 's|sunos6|solaris3|'` ++ ;; ++ -opened*) ++ os=-openedition ++ ;; ++ -os400*) ++ os=-os400 ++ ;; ++ -wince*) ++ os=-wince ++ ;; ++ -osfrose*) ++ os=-osfrose ++ ;; ++ -osf*) ++ os=-osf ++ ;; ++ -utek*) ++ os=-bsd ++ ;; ++ -dynix*) ++ os=-bsd ++ ;; ++ -acis*) ++ os=-aos ++ ;; ++ -atheos*) ++ os=-atheos ++ ;; ++ -syllable*) ++ os=-syllable ++ ;; ++ -386bsd) ++ os=-bsd ++ ;; ++ -ctix* | -uts*) ++ os=-sysv ++ ;; ++ -nova*) ++ os=-rtmk-nova ++ ;; ++ -ns2 ) ++ os=-nextstep2 ++ ;; ++ -nsk*) ++ os=-nsk ++ ;; ++ # Preserve the version number of sinix5. ++ -sinix5.*) ++ os=`echo $os | sed -e 's|sinix|sysv|'` ++ ;; ++ -sinix*) ++ os=-sysv4 ++ ;; ++ -tpf*) ++ os=-tpf ++ ;; ++ -triton*) ++ os=-sysv3 ++ ;; ++ -oss*) ++ os=-sysv3 ++ ;; ++ -svr4) ++ os=-sysv4 ++ ;; ++ -svr3) ++ os=-sysv3 ++ ;; ++ -sysvr4) ++ os=-sysv4 ++ ;; ++ # This must come after -sysvr4. ++ -sysv*) ++ ;; ++ -ose*) ++ os=-ose ++ ;; ++ -es1800*) ++ os=-ose ++ ;; ++ -xenix) ++ os=-xenix ++ ;; ++ -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) ++ os=-mint ++ ;; ++ -aros*) ++ os=-aros ++ ;; ++ -zvmoe) ++ os=-zvmoe ++ ;; ++ -dicos*) ++ os=-dicos ++ ;; ++ -nacl*) ++ ;; ++ -ios) ++ ;; ++ -none) ++ ;; ++ *) ++ # Get rid of the `-' at the beginning of $os. 
++ os=`echo $os | sed 's/[^-]*-//'` ++ echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 ++ exit 1 ++ ;; ++esac ++else ++ ++# Here we handle the default operating systems that come with various machines. ++# The value should be what the vendor currently ships out the door with their ++# machine or put another way, the most popular os provided with the machine. ++ ++# Note that if you're going to try to match "-MANUFACTURER" here (say, ++# "-sun"), then you have to tell the case statement up towards the top ++# that MANUFACTURER isn't an operating system. Otherwise, code above ++# will signal an error saying that MANUFACTURER isn't an operating ++# system, and we'll never get to this point. ++ ++case $basic_machine in ++ score-*) ++ os=-elf ++ ;; ++ spu-*) ++ os=-elf ++ ;; ++ *-acorn) ++ os=-riscix1.2 ++ ;; ++ arm*-rebel) ++ os=-linux ++ ;; ++ arm*-semi) ++ os=-aout ++ ;; ++ c4x-* | tic4x-*) ++ os=-coff ++ ;; ++ c8051-*) ++ os=-elf ++ ;; ++ hexagon-*) ++ os=-elf ++ ;; ++ tic54x-*) ++ os=-coff ++ ;; ++ tic55x-*) ++ os=-coff ++ ;; ++ tic6x-*) ++ os=-coff ++ ;; ++ # This must come before the *-dec entry. ++ pdp10-*) ++ os=-tops20 ++ ;; ++ pdp11-*) ++ os=-none ++ ;; ++ *-dec | vax-*) ++ os=-ultrix4.2 ++ ;; ++ m68*-apollo) ++ os=-domain ++ ;; ++ i386-sun) ++ os=-sunos4.0.2 ++ ;; ++ m68000-sun) ++ os=-sunos3 ++ ;; ++ m68*-cisco) ++ os=-aout ++ ;; ++ mep-*) ++ os=-elf ++ ;; ++ mips*-cisco) ++ os=-elf ++ ;; ++ mips*-*) ++ os=-elf ++ ;; ++ or32-*) ++ os=-coff ++ ;; ++ *-tti) # must be before sparc entry or we get the wrong os. 
++ os=-sysv3 ++ ;; ++ sparc-* | *-sun) ++ os=-sunos4.1.1 ++ ;; ++ *-be) ++ os=-beos ++ ;; ++ *-haiku) ++ os=-haiku ++ ;; ++ *-ibm) ++ os=-aix ++ ;; ++ *-knuth) ++ os=-mmixware ++ ;; ++ *-wec) ++ os=-proelf ++ ;; ++ *-winbond) ++ os=-proelf ++ ;; ++ *-oki) ++ os=-proelf ++ ;; ++ *-hp) ++ os=-hpux ++ ;; ++ *-hitachi) ++ os=-hiux ++ ;; ++ i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) ++ os=-sysv ++ ;; ++ *-cbm) ++ os=-amigaos ++ ;; ++ *-dg) ++ os=-dgux ++ ;; ++ *-dolphin) ++ os=-sysv3 ++ ;; ++ m68k-ccur) ++ os=-rtu ++ ;; ++ m88k-omron*) ++ os=-luna ++ ;; ++ *-next ) ++ os=-nextstep ++ ;; ++ *-sequent) ++ os=-ptx ++ ;; ++ *-crds) ++ os=-unos ++ ;; ++ *-ns) ++ os=-genix ++ ;; ++ i370-*) ++ os=-mvs ++ ;; ++ *-next) ++ os=-nextstep3 ++ ;; ++ *-gould) ++ os=-sysv ++ ;; ++ *-highlevel) ++ os=-bsd ++ ;; ++ *-encore) ++ os=-bsd ++ ;; ++ *-sgi) ++ os=-irix ++ ;; ++ *-siemens) ++ os=-sysv4 ++ ;; ++ *-masscomp) ++ os=-rtu ++ ;; ++ f30[01]-fujitsu | f700-fujitsu) ++ os=-uxpv ++ ;; ++ *-rom68k) ++ os=-coff ++ ;; ++ *-*bug) ++ os=-coff ++ ;; ++ *-apple) ++ os=-macos ++ ;; ++ *-atari*) ++ os=-mint ++ ;; ++ *) ++ os=-none ++ ;; ++esac ++fi ++ ++# Here we handle the case where we know the os, and the CPU type, but not the ++# manufacturer. We pick the logical manufacturer. 
++vendor=unknown ++case $basic_machine in ++ *-unknown) ++ case $os in ++ -riscix*) ++ vendor=acorn ++ ;; ++ -sunos*) ++ vendor=sun ++ ;; ++ -cnk*|-aix*) ++ vendor=ibm ++ ;; ++ -beos*) ++ vendor=be ++ ;; ++ -hpux*) ++ vendor=hp ++ ;; ++ -mpeix*) ++ vendor=hp ++ ;; ++ -hiux*) ++ vendor=hitachi ++ ;; ++ -unos*) ++ vendor=crds ++ ;; ++ -dgux*) ++ vendor=dg ++ ;; ++ -luna*) ++ vendor=omron ++ ;; ++ -genix*) ++ vendor=ns ++ ;; ++ -mvs* | -opened*) ++ vendor=ibm ++ ;; ++ -os400*) ++ vendor=ibm ++ ;; ++ -ptx*) ++ vendor=sequent ++ ;; ++ -tpf*) ++ vendor=ibm ++ ;; ++ -vxsim* | -vxworks* | -windiss*) ++ vendor=wrs ++ ;; ++ -aux*) ++ vendor=apple ++ ;; ++ -hms*) ++ vendor=hitachi ++ ;; ++ -mpw* | -macos*) ++ vendor=apple ++ ;; ++ -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) ++ vendor=atari ++ ;; ++ -vos*) ++ vendor=stratus ++ ;; ++ esac ++ basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` ++ ;; ++esac ++ ++echo $basic_machine$os ++exit ++ ++# Local variables: ++# eval: (add-hook 'write-file-hooks 'time-stamp) ++# time-stamp-start: "timestamp='" ++# time-stamp-format: "%:y-%02m-%02d" ++# time-stamp-end: "'" ++# End: diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/configure index 000000000,000000000..ee90bc6de new file mode 100755 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/configure @@@ -1,0 -1,0 +1,15189 @@@ ++#! /bin/sh ++# Guess values for system-dependent variables and create Makefiles. ++# Generated by GNU Autoconf 2.64 for package-unused version-unused. ++# ++# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, ++# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software ++# Foundation, Inc. ++# ++# This configure script is free software; the Free Software Foundation ++# gives unlimited permission to copy, distribute and modify it. ++## -------------------- ## ++## M4sh Initialization. 
## ++## -------------------- ## ++ ++# Be more Bourne compatible ++DUALCASE=1; export DUALCASE # for MKS sh ++if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : ++ emulate sh ++ NULLCMD=: ++ # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which ++ # is contrary to our usage. Disable this feature. ++ alias -g '${1+"$@"}'='"$@"' ++ setopt NO_GLOB_SUBST ++else ++ case `(set -o) 2>/dev/null` in #( ++ *posix*) : ++ set -o posix ;; #( ++ *) : ++ ;; ++esac ++fi ++ ++ ++as_nl=' ++' ++export as_nl ++# Printing a long string crashes Solaris 7 /usr/bin/printf. ++as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ++as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo ++as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo ++# Prefer a ksh shell builtin over an external printf program on Solaris, ++# but without wasting forks for bash or zsh. ++if test -z "$BASH_VERSION$ZSH_VERSION" \ ++ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then ++ as_echo='print -r --' ++ as_echo_n='print -rn --' ++elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then ++ as_echo='printf %s\n' ++ as_echo_n='printf %s' ++else ++ if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then ++ as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' ++ as_echo_n='/usr/ucb/echo -n' ++ else ++ as_echo_body='eval expr "X$1" : "X\\(.*\\)"' ++ as_echo_n_body='eval ++ arg=$1; ++ case $arg in #( ++ *"$as_nl"*) ++ expr "X$arg" : "X\\(.*\\)$as_nl"; ++ arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; ++ esac; ++ expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ++ ' ++ export as_echo_n_body ++ as_echo_n='sh -c $as_echo_n_body as_echo' ++ fi ++ export as_echo_body ++ as_echo='sh -c $as_echo_body as_echo' ++fi ++ ++# The user is always right. 
++if test "${PATH_SEPARATOR+set}" != set; then ++ PATH_SEPARATOR=: ++ (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { ++ (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || ++ PATH_SEPARATOR=';' ++ } ++fi ++ ++ ++# IFS ++# We need space, tab and new line, in precisely that order. Quoting is ++# there to prevent editors from complaining about space-tab. ++# (If _AS_PATH_WALK were called with IFS unset, it would disable word ++# splitting by setting IFS to empty value.) ++IFS=" "" $as_nl" ++ ++# Find who we are. Look in the path if we contain no directory separator. ++case $0 in #(( ++ *[\\/]* ) as_myself=$0 ;; ++ *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break ++ done ++IFS=$as_save_IFS ++ ++ ;; ++esac ++# We did not find ourselves, most probably we were run as `sh COMMAND' ++# in which case we are not to be found in the path. ++if test "x$as_myself" = x; then ++ as_myself=$0 ++fi ++if test ! -f "$as_myself"; then ++ $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 ++ exit 1 ++fi ++ ++# Unset variables that we do not need and which cause bugs (e.g. in ++# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" ++# suppresses any "Segmentation fault" message there. '((' could ++# trigger a bug in pdksh 5.2.14. ++for as_var in BASH_ENV ENV MAIL MAILPATH ++do eval test x\${$as_var+set} = xset \ ++ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : ++done ++PS1='$ ' ++PS2='> ' ++PS4='+ ' ++ ++# NLS nuisances. ++LC_ALL=C ++export LC_ALL ++LANGUAGE=C ++export LANGUAGE ++ ++# CDPATH. 
++(unset CDPATH) >/dev/null 2>&1 && unset CDPATH ++ ++if test "x$CONFIG_SHELL" = x; then ++ as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : ++ emulate sh ++ NULLCMD=: ++ # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which ++ # is contrary to our usage. Disable this feature. ++ alias -g '\${1+\"\$@\"}'='\"\$@\"' ++ setopt NO_GLOB_SUBST ++else ++ case \`(set -o) 2>/dev/null\` in #( ++ *posix*) : ++ set -o posix ;; #( ++ *) : ++ ;; ++esac ++fi ++" ++ as_required="as_fn_return () { (exit \$1); } ++as_fn_success () { as_fn_return 0; } ++as_fn_failure () { as_fn_return 1; } ++as_fn_ret_success () { return 0; } ++as_fn_ret_failure () { return 1; } ++ ++exitcode=0 ++as_fn_success || { exitcode=1; echo as_fn_success failed.; } ++as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } ++as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } ++as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } ++if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : ++ ++else ++ exitcode=1; echo positional parameters were not saved. 
++fi ++test x\$exitcode = x0 || exit 1" ++ as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO ++ as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO ++ eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && ++ test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 ++test \$(( 1 + 1 )) = 2 || exit 1 ++ ++ test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || ( ++ ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ++ ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO ++ ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO ++ PATH=/empty FPATH=/empty; export PATH FPATH ++ test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\ ++ || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1" ++ if (eval "$as_required") 2>/dev/null; then : ++ as_have_required=yes ++else ++ as_have_required=no ++fi ++ if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : ++ ++else ++ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++as_found=false ++for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ as_found=: ++ case $as_dir in #( ++ /*) ++ for as_base in sh bash ksh sh5; do ++ # Try only shells that exist, to save several forks. 
++ as_shell=$as_dir/$as_base ++ if { test -f "$as_shell" || test -f "$as_shell.exe"; } && ++ { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : ++ CONFIG_SHELL=$as_shell as_have_required=yes ++ if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : ++ break 2 ++fi ++fi ++ done;; ++ esac ++ as_found=false ++done ++$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && ++ { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : ++ CONFIG_SHELL=$SHELL as_have_required=yes ++fi; } ++IFS=$as_save_IFS ++ ++ ++ if test "x$CONFIG_SHELL" != x; then : ++ # We cannot yet assume a decent shell, so we have to provide a ++ # neutralization value for shells without unset; and this also ++ # works around shells that cannot unset nonexistent variables. ++ BASH_ENV=/dev/null ++ ENV=/dev/null ++ (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV ++ export CONFIG_SHELL ++ exec "$CONFIG_SHELL" "$as_myself" ${1+"$@"} ++fi ++ ++ if test x$as_have_required = xno; then : ++ $as_echo "$0: This script requires a shell more modern than all" ++ $as_echo "$0: the shells that I found on your system." ++ if test x${ZSH_VERSION+set} = xset ; then ++ $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" ++ $as_echo "$0: be upgraded to zsh 4.3.4 or later." ++ else ++ $as_echo "$0: Please tell bug-autoconf@gnu.org about your system, ++$0: including any error possibly output before this ++$0: message. Then install a modern shell, or manually run ++$0: the script under such a shell if you do have one." ++ fi ++ exit 1 ++fi ++fi ++fi ++SHELL=${CONFIG_SHELL-/bin/sh} ++export SHELL ++# Unset more variables known to interfere with behavior of common tools. ++CLICOLOR_FORCE= GREP_OPTIONS= ++unset CLICOLOR_FORCE GREP_OPTIONS ++ ++## --------------------- ## ++## M4sh Shell Functions. 
## ++## --------------------- ## ++# as_fn_unset VAR ++# --------------- ++# Portably unset VAR. ++as_fn_unset () ++{ ++ { eval $1=; unset $1;} ++} ++as_unset=as_fn_unset ++ ++# as_fn_set_status STATUS ++# ----------------------- ++# Set $? to STATUS, without forking. ++as_fn_set_status () ++{ ++ return $1 ++} # as_fn_set_status ++ ++# as_fn_exit STATUS ++# ----------------- ++# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. ++as_fn_exit () ++{ ++ set +e ++ as_fn_set_status $1 ++ exit $1 ++} # as_fn_exit ++ ++# as_fn_mkdir_p ++# ------------- ++# Create "$as_dir" as a directory, including parents if necessary. ++as_fn_mkdir_p () ++{ ++ ++ case $as_dir in #( ++ -*) as_dir=./$as_dir;; ++ esac ++ test -d "$as_dir" || eval $as_mkdir_p || { ++ as_dirs= ++ while :; do ++ case $as_dir in #( ++ *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( ++ *) as_qdir=$as_dir;; ++ esac ++ as_dirs="'$as_qdir' $as_dirs" ++ as_dir=`$as_dirname -- "$as_dir" || ++$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ ++ X"$as_dir" : 'X\(//\)[^/]' \| \ ++ X"$as_dir" : 'X\(//\)$' \| \ ++ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || ++$as_echo X"$as_dir" | ++ sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\/\)[^/].*/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\/\)$/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\).*/{ ++ s//\1/ ++ q ++ } ++ s/.*/./; q'` ++ test -d "$as_dir" && break ++ done ++ test -z "$as_dirs" || eval "mkdir $as_dirs" ++ } || test -d "$as_dir" || as_fn_error "cannot create directory $as_dir" ++ ++ ++} # as_fn_mkdir_p ++# as_fn_append VAR VALUE ++# ---------------------- ++# Append the text in VALUE to the end of the definition contained in VAR. Take ++# advantage of any shell optimizations that allow amortized linear growth over ++# repeated appends, instead of the typical quadratic growth present in naive ++# implementations. 
++if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : ++ eval 'as_fn_append () ++ { ++ eval $1+=\$2 ++ }' ++else ++ as_fn_append () ++ { ++ eval $1=\$$1\$2 ++ } ++fi # as_fn_append ++ ++# as_fn_arith ARG... ++# ------------------ ++# Perform arithmetic evaluation on the ARGs, and store the result in the ++# global $as_val. Take advantage of shells that can avoid forks. The arguments ++# must be portable across $(()) and expr. ++if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : ++ eval 'as_fn_arith () ++ { ++ as_val=$(( $* )) ++ }' ++else ++ as_fn_arith () ++ { ++ as_val=`expr "$@" || test $? -eq 1` ++ } ++fi # as_fn_arith ++ ++ ++# as_fn_error ERROR [LINENO LOG_FD] ++# --------------------------------- ++# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are ++# provided, also output the error to LOG_FD, referencing LINENO. Then exit the ++# script with status $?, using 1 if that was 0. ++as_fn_error () ++{ ++ as_status=$?; test $as_status -eq 0 && as_status=1 ++ if test "$3"; then ++ as_lineno=${as_lineno-"$2"} as_lineno_stack=as_lineno_stack=$as_lineno_stack ++ $as_echo "$as_me:${as_lineno-$LINENO}: error: $1" >&$3 ++ fi ++ $as_echo "$as_me: error: $1" >&2 ++ as_fn_exit $as_status ++} # as_fn_error ++ ++if expr a : '\(a\)' >/dev/null 2>&1 && ++ test "X`expr 00001 : '.*\(...\)'`" = X001; then ++ as_expr=expr ++else ++ as_expr=false ++fi ++ ++if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then ++ as_basename=basename ++else ++ as_basename=false ++fi ++ ++if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then ++ as_dirname=dirname ++else ++ as_dirname=false ++fi ++ ++as_me=`$as_basename -- "$0" || ++$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ ++ X"$0" : 'X\(//\)$' \| \ ++ X"$0" : 'X\(/\)' \| . 
2>/dev/null || ++$as_echo X/"$0" | ++ sed '/^.*\/\([^/][^/]*\)\/*$/{ ++ s//\1/ ++ q ++ } ++ /^X\/\(\/\/\)$/{ ++ s//\1/ ++ q ++ } ++ /^X\/\(\/\).*/{ ++ s//\1/ ++ q ++ } ++ s/.*/./; q'` ++ ++# Avoid depending upon Character Ranges. ++as_cr_letters='abcdefghijklmnopqrstuvwxyz' ++as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' ++as_cr_Letters=$as_cr_letters$as_cr_LETTERS ++as_cr_digits='0123456789' ++as_cr_alnum=$as_cr_Letters$as_cr_digits ++ ++ ++ as_lineno_1=$LINENO as_lineno_1a=$LINENO ++ as_lineno_2=$LINENO as_lineno_2a=$LINENO ++ eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && ++ test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { ++ # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) ++ sed -n ' ++ p ++ /[$]LINENO/= ++ ' <$as_myself | ++ sed ' ++ s/[$]LINENO.*/&-/ ++ t lineno ++ b ++ :lineno ++ N ++ :loop ++ s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ ++ t loop ++ s/-\n.*// ++ ' >$as_me.lineno && ++ chmod +x "$as_me.lineno" || ++ { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } ++ ++ # Don't try to exec as it changes $[0], causing all sort of problems ++ # (the dirname of $[0] is not the place where we might find the ++ # original and so on. Autoconf is especially sensitive to this). ++ . "./$as_me.lineno" ++ # Exit status is that of the last command. ++ exit ++} ++ ++ECHO_C= ECHO_N= ECHO_T= ++case `echo -n x` in #((((( ++-n*) ++ case `echo 'xy\c'` in ++ *c*) ECHO_T=' ';; # ECHO_T is single tab character. ++ xy) ECHO_C='\c';; ++ *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ++ ECHO_T=' ';; ++ esac;; ++*) ++ ECHO_N='-n';; ++esac ++ ++rm -f conf$$ conf$$.exe conf$$.file ++if test -d conf$$.dir; then ++ rm -f conf$$.dir/conf$$.file ++else ++ rm -f conf$$.dir ++ mkdir conf$$.dir 2>/dev/null ++fi ++if (echo >conf$$.file) 2>/dev/null; then ++ if ln -s conf$$.file conf$$ 2>/dev/null; then ++ as_ln_s='ln -s' ++ # ... 
but there are two gotchas: ++ # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. ++ # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. ++ # In both cases, we have to default to `cp -p'. ++ ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || ++ as_ln_s='cp -p' ++ elif ln conf$$.file conf$$ 2>/dev/null; then ++ as_ln_s=ln ++ else ++ as_ln_s='cp -p' ++ fi ++else ++ as_ln_s='cp -p' ++fi ++rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file ++rmdir conf$$.dir 2>/dev/null ++ ++if mkdir -p . 2>/dev/null; then ++ as_mkdir_p='mkdir -p "$as_dir"' ++else ++ test -d ./-p && rmdir ./-p ++ as_mkdir_p=false ++fi ++ ++if test -x / >/dev/null 2>&1; then ++ as_test_x='test -x' ++else ++ if ls -dL / >/dev/null 2>&1; then ++ as_ls_L_option=L ++ else ++ as_ls_L_option= ++ fi ++ as_test_x=' ++ eval sh -c '\'' ++ if test -d "$1"; then ++ test -d "$1/."; ++ else ++ case $1 in #( ++ -*)set "./$1";; ++ esac; ++ case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( ++ ???[sx]*):;;*)false;;esac;fi ++ '\'' sh ++ ' ++fi ++as_executable_p=$as_test_x ++ ++# Sed expression to map a string onto a valid CPP name. ++as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" ++ ++# Sed expression to map a string onto a valid variable name. ++as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" ++ ++SHELL=${CONFIG_SHELL-/bin/sh} ++ ++ ++exec 7<&0 &1 ++ ++# Name of the host. ++# hostname on some systems (SVR3.2, Linux) returns a bogus exit status, ++# so uname gets run too. ++ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` ++ ++# ++# Initializations. ++# ++ac_default_prefix=/usr/local ++ac_clean_files= ++ac_config_libobj_dir=. ++LIBOBJS= ++cross_compiling=no ++subdirs= ++MFLAGS= ++MAKEFLAGS= ++ ++# Identity of this package. 
++PACKAGE_NAME='package-unused' ++PACKAGE_TARNAME='libbacktrace' ++PACKAGE_VERSION='version-unused' ++PACKAGE_STRING='package-unused version-unused' ++PACKAGE_BUGREPORT='' ++PACKAGE_URL='' ++ ++ac_unique_file="backtrace.h" ++# Factoring default headers for most tests. ++ac_includes_default="\ ++#include ++#ifdef HAVE_SYS_TYPES_H ++# include ++#endif ++#ifdef HAVE_SYS_STAT_H ++# include ++#endif ++#ifdef STDC_HEADERS ++# include ++# include ++#else ++# ifdef HAVE_STDLIB_H ++# include ++# endif ++#endif ++#ifdef HAVE_STRING_H ++# if !defined STDC_HEADERS && defined HAVE_MEMORY_H ++# include ++# endif ++# include ++#endif ++#ifdef HAVE_STRINGS_H ++# include ++#endif ++#ifdef HAVE_INTTYPES_H ++# include ++#endif ++#ifdef HAVE_STDINT_H ++# include ++#endif ++#ifdef HAVE_UNISTD_H ++# include ++#endif" ++ ++ac_subst_vars='am__EXEEXT_FALSE ++am__EXEEXT_TRUE ++LTLIBOBJS ++LIBOBJS ++NATIVE_FALSE ++NATIVE_TRUE ++BACKTRACE_USES_MALLOC ++ALLOC_FILE ++VIEW_FILE ++BACKTRACE_SUPPORTS_DATA ++BACKTRACE_SUPPORTED ++FORMAT_FILE ++BACKTRACE_SUPPORTS_THREADS ++PIC_FLAG ++WARN_FLAGS ++EXTRA_FLAGS ++BACKTRACE_FILE ++OTOOL64 ++OTOOL ++LIPO ++NMEDIT ++DSYMUTIL ++AR ++OBJDUMP ++LN_S ++NM ++ac_ct_DUMPBIN ++DUMPBIN ++LD ++FGREP ++SED ++LIBTOOL ++RANLIB ++MAINT ++MAINTAINER_MODE_FALSE ++MAINTAINER_MODE_TRUE ++am__untar ++am__tar ++AMTAR ++am__leading_dot ++SET_MAKE ++AWK ++mkdir_p ++MKDIR_P ++INSTALL_STRIP_PROGRAM ++STRIP ++install_sh ++MAKEINFO ++AUTOHEADER ++AUTOMAKE ++AUTOCONF ++ACLOCAL ++VERSION ++PACKAGE ++CYGPATH_W ++am__isrc ++INSTALL_DATA ++INSTALL_SCRIPT ++INSTALL_PROGRAM ++libtool_VERSION ++EGREP ++GREP ++CPP ++OBJEXT ++EXEEXT ++ac_ct_CC ++CPPFLAGS ++LDFLAGS ++CFLAGS ++CC ++target_os ++target_vendor ++target_cpu ++target ++host_os ++host_vendor ++host_cpu ++host ++build_os ++build_vendor ++build_cpu ++build ++multi_basedir ++target_alias ++host_alias ++build_alias ++LIBS ++ECHO_T ++ECHO_N ++ECHO_C ++DEFS ++mandir ++localedir ++libdir ++psdir ++pdfdir ++dvidir ++htmldir ++infodir 
++docdir ++oldincludedir ++includedir ++localstatedir ++sharedstatedir ++sysconfdir ++datadir ++datarootdir ++libexecdir ++sbindir ++bindir ++program_transform_name ++prefix ++exec_prefix ++PACKAGE_URL ++PACKAGE_BUGREPORT ++PACKAGE_STRING ++PACKAGE_VERSION ++PACKAGE_TARNAME ++PACKAGE_NAME ++PATH_SEPARATOR ++SHELL' ++ac_subst_files='' ++ac_user_opts=' ++enable_option_checking ++enable_multilib ++enable_maintainer_mode ++with_target_subdir ++enable_shared ++enable_static ++with_pic ++enable_fast_install ++with_gnu_ld ++enable_libtool_lock ++with_system_libunwind ++enable_host_shared ++' ++ ac_precious_vars='build_alias ++host_alias ++target_alias ++CC ++CFLAGS ++LDFLAGS ++LIBS ++CPPFLAGS ++CPP' ++ ++ ++# Initialize some variables set by options. ++ac_init_help= ++ac_init_version=false ++ac_unrecognized_opts= ++ac_unrecognized_sep= ++# The variables have the same names as the options, with ++# dashes changed to underlines. ++cache_file=/dev/null ++exec_prefix=NONE ++no_create= ++no_recursion= ++prefix=NONE ++program_prefix=NONE ++program_suffix=NONE ++program_transform_name=s,x,x, ++silent= ++site= ++srcdir= ++verbose= ++x_includes=NONE ++x_libraries=NONE ++ ++# Installation directory options. ++# These are left unexpanded so users can "make install exec_prefix=/foo" ++# and all the variables that are supposed to be based on exec_prefix ++# by default will actually change. ++# Use braces instead of parens because sh, perl, etc. also accept them. ++# (The list follows the same order as the GNU Coding Standards.) 
++bindir='${exec_prefix}/bin' ++sbindir='${exec_prefix}/sbin' ++libexecdir='${exec_prefix}/libexec' ++datarootdir='${prefix}/share' ++datadir='${datarootdir}' ++sysconfdir='${prefix}/etc' ++sharedstatedir='${prefix}/com' ++localstatedir='${prefix}/var' ++includedir='${prefix}/include' ++oldincludedir='/usr/include' ++docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' ++infodir='${datarootdir}/info' ++htmldir='${docdir}' ++dvidir='${docdir}' ++pdfdir='${docdir}' ++psdir='${docdir}' ++libdir='${exec_prefix}/lib' ++localedir='${datarootdir}/locale' ++mandir='${datarootdir}/man' ++ ++ac_prev= ++ac_dashdash= ++for ac_option ++do ++ # If the previous option needs an argument, assign it. ++ if test -n "$ac_prev"; then ++ eval $ac_prev=\$ac_option ++ ac_prev= ++ continue ++ fi ++ ++ case $ac_option in ++ *=*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; ++ *) ac_optarg=yes ;; ++ esac ++ ++ # Accept the important Cygnus configure options, so we can diagnose typos. ++ ++ case $ac_dashdash$ac_option in ++ --) ++ ac_dashdash=yes ;; ++ ++ -bindir | --bindir | --bindi | --bind | --bin | --bi) ++ ac_prev=bindir ;; ++ -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) ++ bindir=$ac_optarg ;; ++ ++ -build | --build | --buil | --bui | --bu) ++ ac_prev=build_alias ;; ++ -build=* | --build=* | --buil=* | --bui=* | --bu=*) ++ build_alias=$ac_optarg ;; ++ ++ -cache-file | --cache-file | --cache-fil | --cache-fi \ ++ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) ++ ac_prev=cache_file ;; ++ -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ ++ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) ++ cache_file=$ac_optarg ;; ++ ++ --config-cache | -C) ++ cache_file=config.cache ;; ++ ++ -datadir | --datadir | --datadi | --datad) ++ ac_prev=datadir ;; ++ -datadir=* | --datadir=* | --datadi=* | --datad=*) ++ datadir=$ac_optarg ;; ++ ++ -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ ++ | 
--dataroo | --dataro | --datar) ++ ac_prev=datarootdir ;; ++ -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ ++ | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) ++ datarootdir=$ac_optarg ;; ++ ++ -disable-* | --disable-*) ++ ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` ++ # Reject names that are not valid shell variable names. ++ expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && ++ as_fn_error "invalid feature name: $ac_useropt" ++ ac_useropt_orig=$ac_useropt ++ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` ++ case $ac_user_opts in ++ *" ++"enable_$ac_useropt" ++"*) ;; ++ *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" ++ ac_unrecognized_sep=', ';; ++ esac ++ eval enable_$ac_useropt=no ;; ++ ++ -docdir | --docdir | --docdi | --doc | --do) ++ ac_prev=docdir ;; ++ -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) ++ docdir=$ac_optarg ;; ++ ++ -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) ++ ac_prev=dvidir ;; ++ -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) ++ dvidir=$ac_optarg ;; ++ ++ -enable-* | --enable-*) ++ ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` ++ # Reject names that are not valid shell variable names. 
++ expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && ++ as_fn_error "invalid feature name: $ac_useropt" ++ ac_useropt_orig=$ac_useropt ++ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` ++ case $ac_user_opts in ++ *" ++"enable_$ac_useropt" ++"*) ;; ++ *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" ++ ac_unrecognized_sep=', ';; ++ esac ++ eval enable_$ac_useropt=\$ac_optarg ;; ++ ++ -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ ++ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ ++ | --exec | --exe | --ex) ++ ac_prev=exec_prefix ;; ++ -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ ++ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ ++ | --exec=* | --exe=* | --ex=*) ++ exec_prefix=$ac_optarg ;; ++ ++ -gas | --gas | --ga | --g) ++ # Obsolete; use --with-gas. ++ with_gas=yes ;; ++ ++ -help | --help | --hel | --he | -h) ++ ac_init_help=long ;; ++ -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) ++ ac_init_help=recursive ;; ++ -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) ++ ac_init_help=short ;; ++ ++ -host | --host | --hos | --ho) ++ ac_prev=host_alias ;; ++ -host=* | --host=* | --hos=* | --ho=*) ++ host_alias=$ac_optarg ;; ++ ++ -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) ++ ac_prev=htmldir ;; ++ -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ ++ | --ht=*) ++ htmldir=$ac_optarg ;; ++ ++ -includedir | --includedir | --includedi | --included | --include \ ++ | --includ | --inclu | --incl | --inc) ++ ac_prev=includedir ;; ++ -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ ++ | --includ=* | --inclu=* | --incl=* | --inc=*) ++ includedir=$ac_optarg ;; ++ ++ -infodir | --infodir | --infodi | --infod | --info | --inf) ++ ac_prev=infodir ;; ++ -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) ++ infodir=$ac_optarg ;; ++ ++ -libdir | 
--libdir | --libdi | --libd) ++ ac_prev=libdir ;; ++ -libdir=* | --libdir=* | --libdi=* | --libd=*) ++ libdir=$ac_optarg ;; ++ ++ -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ ++ | --libexe | --libex | --libe) ++ ac_prev=libexecdir ;; ++ -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ ++ | --libexe=* | --libex=* | --libe=*) ++ libexecdir=$ac_optarg ;; ++ ++ -localedir | --localedir | --localedi | --localed | --locale) ++ ac_prev=localedir ;; ++ -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) ++ localedir=$ac_optarg ;; ++ ++ -localstatedir | --localstatedir | --localstatedi | --localstated \ ++ | --localstate | --localstat | --localsta | --localst | --locals) ++ ac_prev=localstatedir ;; ++ -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ ++ | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) ++ localstatedir=$ac_optarg ;; ++ ++ -mandir | --mandir | --mandi | --mand | --man | --ma | --m) ++ ac_prev=mandir ;; ++ -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) ++ mandir=$ac_optarg ;; ++ ++ -nfp | --nfp | --nf) ++ # Obsolete; use --without-fp. 
++ with_fp=no ;; ++ ++ -no-create | --no-create | --no-creat | --no-crea | --no-cre \ ++ | --no-cr | --no-c | -n) ++ no_create=yes ;; ++ ++ -no-recursion | --no-recursion | --no-recursio | --no-recursi \ ++ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) ++ no_recursion=yes ;; ++ ++ -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ ++ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ ++ | --oldin | --oldi | --old | --ol | --o) ++ ac_prev=oldincludedir ;; ++ -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ ++ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ ++ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) ++ oldincludedir=$ac_optarg ;; ++ ++ -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) ++ ac_prev=prefix ;; ++ -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) ++ prefix=$ac_optarg ;; ++ ++ -program-prefix | --program-prefix | --program-prefi | --program-pref \ ++ | --program-pre | --program-pr | --program-p) ++ ac_prev=program_prefix ;; ++ -program-prefix=* | --program-prefix=* | --program-prefi=* \ ++ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) ++ program_prefix=$ac_optarg ;; ++ ++ -program-suffix | --program-suffix | --program-suffi | --program-suff \ ++ | --program-suf | --program-su | --program-s) ++ ac_prev=program_suffix ;; ++ -program-suffix=* | --program-suffix=* | --program-suffi=* \ ++ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) ++ program_suffix=$ac_optarg ;; ++ ++ -program-transform-name | --program-transform-name \ ++ | --program-transform-nam | --program-transform-na \ ++ | --program-transform-n | --program-transform- \ ++ | --program-transform | --program-transfor \ ++ | --program-transfo | --program-transf \ ++ | --program-trans | --program-tran \ ++ | --progr-tra | --program-tr | --program-t) ++ ac_prev=program_transform_name ;; ++ 
-program-transform-name=* | --program-transform-name=* \ ++ | --program-transform-nam=* | --program-transform-na=* \ ++ | --program-transform-n=* | --program-transform-=* \ ++ | --program-transform=* | --program-transfor=* \ ++ | --program-transfo=* | --program-transf=* \ ++ | --program-trans=* | --program-tran=* \ ++ | --progr-tra=* | --program-tr=* | --program-t=*) ++ program_transform_name=$ac_optarg ;; ++ ++ -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) ++ ac_prev=pdfdir ;; ++ -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) ++ pdfdir=$ac_optarg ;; ++ ++ -psdir | --psdir | --psdi | --psd | --ps) ++ ac_prev=psdir ;; ++ -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) ++ psdir=$ac_optarg ;; ++ ++ -q | -quiet | --quiet | --quie | --qui | --qu | --q \ ++ | -silent | --silent | --silen | --sile | --sil) ++ silent=yes ;; ++ ++ -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ++ ac_prev=sbindir ;; ++ -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ ++ | --sbi=* | --sb=*) ++ sbindir=$ac_optarg ;; ++ ++ -sharedstatedir | --sharedstatedir | --sharedstatedi \ ++ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ ++ | --sharedst | --shareds | --shared | --share | --shar \ ++ | --sha | --sh) ++ ac_prev=sharedstatedir ;; ++ -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ ++ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ ++ | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ ++ | --sha=* | --sh=*) ++ sharedstatedir=$ac_optarg ;; ++ ++ -site | --site | --sit) ++ ac_prev=site ;; ++ -site=* | --site=* | --sit=*) ++ site=$ac_optarg ;; ++ ++ -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) ++ ac_prev=srcdir ;; ++ -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) ++ srcdir=$ac_optarg ;; ++ ++ -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ ++ | --syscon | --sysco | --sysc | --sys | --sy) ++ ac_prev=sysconfdir ;; ++ 
-sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ ++ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) ++ sysconfdir=$ac_optarg ;; ++ ++ -target | --target | --targe | --targ | --tar | --ta | --t) ++ ac_prev=target_alias ;; ++ -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) ++ target_alias=$ac_optarg ;; ++ ++ -v | -verbose | --verbose | --verbos | --verbo | --verb) ++ verbose=yes ;; ++ ++ -version | --version | --versio | --versi | --vers | -V) ++ ac_init_version=: ;; ++ ++ -with-* | --with-*) ++ ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` ++ # Reject names that are not valid shell variable names. ++ expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && ++ as_fn_error "invalid package name: $ac_useropt" ++ ac_useropt_orig=$ac_useropt ++ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` ++ case $ac_user_opts in ++ *" ++"with_$ac_useropt" ++"*) ;; ++ *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" ++ ac_unrecognized_sep=', ';; ++ esac ++ eval with_$ac_useropt=\$ac_optarg ;; ++ ++ -without-* | --without-*) ++ ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` ++ # Reject names that are not valid shell variable names. ++ expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && ++ as_fn_error "invalid package name: $ac_useropt" ++ ac_useropt_orig=$ac_useropt ++ ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` ++ case $ac_user_opts in ++ *" ++"with_$ac_useropt" ++"*) ;; ++ *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" ++ ac_unrecognized_sep=', ';; ++ esac ++ eval with_$ac_useropt=no ;; ++ ++ --x) ++ # Obsolete; use --with-x. 
++ with_x=yes ;; ++ ++ -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ ++ | --x-incl | --x-inc | --x-in | --x-i) ++ ac_prev=x_includes ;; ++ -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ ++ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) ++ x_includes=$ac_optarg ;; ++ ++ -x-libraries | --x-libraries | --x-librarie | --x-librari \ ++ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) ++ ac_prev=x_libraries ;; ++ -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ ++ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) ++ x_libraries=$ac_optarg ;; ++ ++ -*) as_fn_error "unrecognized option: \`$ac_option' ++Try \`$0 --help' for more information." ++ ;; ++ ++ *=*) ++ ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` ++ # Reject names that are not valid shell variable names. ++ case $ac_envvar in #( ++ '' | [0-9]* | *[!_$as_cr_alnum]* ) ++ as_fn_error "invalid variable name: \`$ac_envvar'" ;; ++ esac ++ eval $ac_envvar=\$ac_optarg ++ export $ac_envvar ;; ++ ++ *) ++ # FIXME: should be removed in autoconf 3.0. ++ $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 ++ expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && ++ $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 ++ : ${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option} ++ ;; ++ ++ esac ++done ++ ++if test -n "$ac_prev"; then ++ ac_option=--`echo $ac_prev | sed 's/_/-/g'` ++ as_fn_error "missing argument to $ac_option" ++fi ++ ++if test -n "$ac_unrecognized_opts"; then ++ case $enable_option_checking in ++ no) ;; ++ fatal) as_fn_error "unrecognized options: $ac_unrecognized_opts" ;; ++ *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; ++ esac ++fi ++ ++# Check all directory arguments for consistency. 
++for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ ++ datadir sysconfdir sharedstatedir localstatedir includedir \ ++ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ ++ libdir localedir mandir ++do ++ eval ac_val=\$$ac_var ++ # Remove trailing slashes. ++ case $ac_val in ++ */ ) ++ ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` ++ eval $ac_var=\$ac_val;; ++ esac ++ # Be sure to have absolute directory names. ++ case $ac_val in ++ [\\/$]* | ?:[\\/]* ) continue;; ++ NONE | '' ) case $ac_var in *prefix ) continue;; esac;; ++ esac ++ as_fn_error "expected an absolute directory name for --$ac_var: $ac_val" ++done ++ ++# There might be people who depend on the old broken behavior: `$host' ++# used to hold the argument of --host etc. ++# FIXME: To remove some day. ++build=$build_alias ++host=$host_alias ++target=$target_alias ++ ++# FIXME: To remove some day. ++if test "x$host_alias" != x; then ++ if test "x$build_alias" = x; then ++ cross_compiling=maybe ++ $as_echo "$as_me: WARNING: If you wanted to set the --build type, don't use --host. ++ If a cross compiler is detected then cross compile mode will be used." >&2 ++ elif test "x$build_alias" != "x$host_alias"; then ++ cross_compiling=yes ++ fi ++fi ++ ++ac_tool_prefix= ++test -n "$host_alias" && ac_tool_prefix=$host_alias- ++ ++test "$silent" = yes && exec 6>/dev/null ++ ++ ++ac_pwd=`pwd` && test -n "$ac_pwd" && ++ac_ls_di=`ls -di .` && ++ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || ++ as_fn_error "working directory cannot be determined" ++test "X$ac_ls_di" = "X$ac_pwd_ls_di" || ++ as_fn_error "pwd does not report name of working directory" ++ ++ ++# Find the source files, if location was not specified. ++if test -z "$srcdir"; then ++ ac_srcdir_defaulted=yes ++ # Try the directory containing this script, then the parent directory. 
++ ac_confdir=`$as_dirname -- "$as_myself" || ++$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ ++ X"$as_myself" : 'X\(//\)[^/]' \| \ ++ X"$as_myself" : 'X\(//\)$' \| \ ++ X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || ++$as_echo X"$as_myself" | ++ sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\/\)[^/].*/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\/\)$/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\).*/{ ++ s//\1/ ++ q ++ } ++ s/.*/./; q'` ++ srcdir=$ac_confdir ++ if test ! -r "$srcdir/$ac_unique_file"; then ++ srcdir=.. ++ fi ++else ++ ac_srcdir_defaulted=no ++fi ++if test ! -r "$srcdir/$ac_unique_file"; then ++ test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." ++ as_fn_error "cannot find sources ($ac_unique_file) in $srcdir" ++fi ++ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" ++ac_abs_confdir=`( ++ cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error "$ac_msg" ++ pwd)` ++# When building in place, set srcdir=. ++if test "$ac_abs_confdir" = "$ac_pwd"; then ++ srcdir=. ++fi ++# Remove unnecessary trailing slashes from srcdir. ++# Double slashes in file names in object file debugging info ++# mess up M-x gdb in Emacs. ++case $srcdir in ++*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; ++esac ++for ac_var in $ac_precious_vars; do ++ eval ac_env_${ac_var}_set=\${${ac_var}+set} ++ eval ac_env_${ac_var}_value=\$${ac_var} ++ eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} ++ eval ac_cv_env_${ac_var}_value=\$${ac_var} ++done ++ ++# ++# Report the --help message. ++# ++if test "$ac_init_help" = "long"; then ++ # Omit some internal or obsolete options to make the list less imposing. ++ # This message is too long to be a string in the A/UX 3.1 sh. ++ cat <<_ACEOF ++\`configure' configures package-unused version-unused to adapt to many kinds of systems. ++ ++Usage: $0 [OPTION]... [VAR=VALUE]... ++ ++To assign environment variables (e.g., CC, CFLAGS...), specify them as ++VAR=VALUE. 
See below for descriptions of some of the useful variables. ++ ++Defaults for the options are specified in brackets. ++ ++Configuration: ++ -h, --help display this help and exit ++ --help=short display options specific to this package ++ --help=recursive display the short help of all the included packages ++ -V, --version display version information and exit ++ -q, --quiet, --silent do not print \`checking...' messages ++ --cache-file=FILE cache test results in FILE [disabled] ++ -C, --config-cache alias for \`--cache-file=config.cache' ++ -n, --no-create do not create output files ++ --srcdir=DIR find the sources in DIR [configure dir or \`..'] ++ ++Installation directories: ++ --prefix=PREFIX install architecture-independent files in PREFIX ++ [$ac_default_prefix] ++ --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX ++ [PREFIX] ++ ++By default, \`make install' will install all the files in ++\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify ++an installation prefix other than \`$ac_default_prefix' using \`--prefix', ++for instance \`--prefix=\$HOME'. ++ ++For better control, use the options below. 
++ ++Fine tuning of the installation directories: ++ --bindir=DIR user executables [EPREFIX/bin] ++ --sbindir=DIR system admin executables [EPREFIX/sbin] ++ --libexecdir=DIR program executables [EPREFIX/libexec] ++ --sysconfdir=DIR read-only single-machine data [PREFIX/etc] ++ --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] ++ --localstatedir=DIR modifiable single-machine data [PREFIX/var] ++ --libdir=DIR object code libraries [EPREFIX/lib] ++ --includedir=DIR C header files [PREFIX/include] ++ --oldincludedir=DIR C header files for non-gcc [/usr/include] ++ --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] ++ --datadir=DIR read-only architecture-independent data [DATAROOTDIR] ++ --infodir=DIR info documentation [DATAROOTDIR/info] ++ --localedir=DIR locale-dependent data [DATAROOTDIR/locale] ++ --mandir=DIR man documentation [DATAROOTDIR/man] ++ --docdir=DIR documentation root [DATAROOTDIR/doc/libbacktrace] ++ --htmldir=DIR html documentation [DOCDIR] ++ --dvidir=DIR dvi documentation [DOCDIR] ++ --pdfdir=DIR pdf documentation [DOCDIR] ++ --psdir=DIR ps documentation [DOCDIR] ++_ACEOF ++ ++ cat <<\_ACEOF ++ ++Program names: ++ --program-prefix=PREFIX prepend PREFIX to installed program names ++ --program-suffix=SUFFIX append SUFFIX to installed program names ++ --program-transform-name=PROGRAM run sed PROGRAM on installed program names ++ ++System types: ++ --build=BUILD configure for building on BUILD [guessed] ++ --host=HOST cross-compile to build programs to run on HOST [BUILD] ++ --target=TARGET configure for building compilers for TARGET [HOST] ++_ACEOF ++fi ++ ++if test -n "$ac_init_help"; then ++ case $ac_init_help in ++ short | recursive ) echo "Configuration of package-unused version-unused:";; ++ esac ++ cat <<\_ACEOF ++ ++Optional Features: ++ --disable-option-checking ignore unrecognized --enable/--with options ++ --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) ++ 
--enable-FEATURE[=ARG] include FEATURE [ARG=yes] ++ --enable-multilib build many library versions (default) ++ --enable-maintainer-mode enable make rules and dependencies not useful ++ (and sometimes confusing) to the casual installer ++ --enable-shared[=PKGS] build shared libraries [default=yes] ++ --enable-static[=PKGS] build static libraries [default=yes] ++ --enable-fast-install[=PKGS] ++ optimize for fast installation [default=yes] ++ --disable-libtool-lock avoid locking (might break parallel builds) ++ --enable-host-shared build host code as shared libraries ++ ++Optional Packages: ++ --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] ++ --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) ++ --with-target-subdir=SUBDIR Configuring in a subdirectory for target ++ --with-pic try to use only PIC/non-PIC objects [default=use ++ both] ++ --with-gnu-ld assume the C compiler uses GNU ld [default=no] ++ --with-system-libunwind use installed libunwind ++ ++Some influential environment variables: ++ CC C compiler command ++ CFLAGS C compiler flags ++ LDFLAGS linker flags, e.g. -L if you have libraries in a ++ nonstandard directory ++ LIBS libraries to pass to the linker, e.g. -l ++ CPPFLAGS C/C++/Objective C preprocessor flags, e.g. -I if ++ you have headers in a nonstandard directory ++ CPP C preprocessor ++ ++Use these variables to override the choices made by `configure' or to help ++it to find libraries and programs with nonstandard names/locations. ++ ++Report bugs to the package provider. ++_ACEOF ++ac_status=$? ++fi ++ ++if test "$ac_init_help" = "recursive"; then ++ # If there are subdirs, report their specific --help. ++ for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue ++ test -d "$ac_dir" || ++ { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || ++ continue ++ ac_builddir=. ++ ++case "$ac_dir" in ++.) ac_dir_suffix= ac_top_builddir_sub=. 
ac_top_build_prefix= ;; ++*) ++ ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` ++ # A ".." for each directory in $ac_dir_suffix. ++ ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` ++ case $ac_top_builddir_sub in ++ "") ac_top_builddir_sub=. ac_top_build_prefix= ;; ++ *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; ++ esac ;; ++esac ++ac_abs_top_builddir=$ac_pwd ++ac_abs_builddir=$ac_pwd$ac_dir_suffix ++# for backward compatibility: ++ac_top_builddir=$ac_top_build_prefix ++ ++case $srcdir in ++ .) # We are building in place. ++ ac_srcdir=. ++ ac_top_srcdir=$ac_top_builddir_sub ++ ac_abs_top_srcdir=$ac_pwd ;; ++ [\\/]* | ?:[\\/]* ) # Absolute name. ++ ac_srcdir=$srcdir$ac_dir_suffix; ++ ac_top_srcdir=$srcdir ++ ac_abs_top_srcdir=$srcdir ;; ++ *) # Relative name. ++ ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ++ ac_top_srcdir=$ac_top_build_prefix$srcdir ++ ac_abs_top_srcdir=$ac_pwd/$srcdir ;; ++esac ++ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix ++ ++ cd "$ac_dir" || { ac_status=$?; continue; } ++ # Check for guested configure. ++ if test -f "$ac_srcdir/configure.gnu"; then ++ echo && ++ $SHELL "$ac_srcdir/configure.gnu" --help=recursive ++ elif test -f "$ac_srcdir/configure"; then ++ echo && ++ $SHELL "$ac_srcdir/configure" --help=recursive ++ else ++ $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 ++ fi || ac_status=$? ++ cd "$ac_pwd" || { ac_status=$?; break; } ++ done ++fi ++ ++test -n "$ac_init_help" && exit $ac_status ++if $ac_init_version; then ++ cat <<\_ACEOF ++package-unused configure version-unused ++generated by GNU Autoconf 2.64 ++ ++Copyright (C) 2009 Free Software Foundation, Inc. ++This configure script is free software; the Free Software Foundation ++gives unlimited permission to copy, distribute and modify it. ++_ACEOF ++ exit ++fi ++ ++## ------------------------ ## ++## Autoconf initialization. 
## ++## ------------------------ ## ++ ++# ac_fn_c_try_compile LINENO ++# -------------------------- ++# Try to compile conftest.$ac_ext, and return whether this succeeded. ++ac_fn_c_try_compile () ++{ ++ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack ++ rm -f conftest.$ac_objext ++ if { { ac_try="$ac_compile" ++case "(($ac_try" in ++ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; ++ *) ac_try_echo=$ac_try;; ++esac ++eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" ++$as_echo "$ac_try_echo"; } >&5 ++ (eval "$ac_compile") 2>conftest.err ++ ac_status=$? ++ if test -s conftest.err; then ++ grep -v '^ *+' conftest.err >conftest.er1 ++ cat conftest.er1 >&5 ++ mv -f conftest.er1 conftest.err ++ fi ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } && { ++ test -z "$ac_c_werror_flag" || ++ test ! -s conftest.err ++ } && test -s conftest.$ac_objext; then : ++ ac_retval=0 ++else ++ $as_echo "$as_me: failed program was:" >&5 ++sed 's/^/| /' conftest.$ac_ext >&5 ++ ++ ac_retval=1 ++fi ++ eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} ++ return $ac_retval ++ ++} # ac_fn_c_try_compile ++ ++# ac_fn_c_try_cpp LINENO ++# ---------------------- ++# Try to preprocess conftest.$ac_ext, and return whether this succeeded. ++ac_fn_c_try_cpp () ++{ ++ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack ++ if { { ac_try="$ac_cpp conftest.$ac_ext" ++case "(($ac_try" in ++ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; ++ *) ac_try_echo=$ac_try;; ++esac ++eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" ++$as_echo "$ac_try_echo"; } >&5 ++ (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err ++ ac_status=$? ++ if test -s conftest.err; then ++ grep -v '^ *+' conftest.err >conftest.er1 ++ cat conftest.er1 >&5 ++ mv -f conftest.er1 conftest.err ++ fi ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 ++ test $ac_status = 0; } >/dev/null && { ++ test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || ++ test ! -s conftest.err ++ }; then : ++ ac_retval=0 ++else ++ $as_echo "$as_me: failed program was:" >&5 ++sed 's/^/| /' conftest.$ac_ext >&5 ++ ++ ac_retval=1 ++fi ++ eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} ++ return $ac_retval ++ ++} # ac_fn_c_try_cpp ++ ++# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES ++# ------------------------------------------------------- ++# Tests whether HEADER exists, giving a warning if it cannot be compiled using ++# the include files in INCLUDES and setting the cache variable VAR ++# accordingly. ++ac_fn_c_check_header_mongrel () ++{ ++ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack ++ if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 ++$as_echo_n "checking for $2... " >&6; } ++if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : ++ $as_echo_n "(cached) " >&6 ++fi ++eval ac_res=\$$3 ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 ++$as_echo "$ac_res" >&6; } ++else ++ # Is the header compilable? ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 ++$as_echo_n "checking $2 usability... " >&6; } ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++$4 ++#include <$2> ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_header_compiler=yes ++else ++ ac_header_compiler=no ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 ++$as_echo "$ac_header_compiler" >&6; } ++ ++# Is the header present? ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 ++$as_echo_n "checking $2 presence... " >&6; } ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++#include <$2> ++_ACEOF ++if ac_fn_c_try_cpp "$LINENO"; then : ++ ac_header_preproc=yes ++else ++ ac_header_preproc=no ++fi ++rm -f conftest.err conftest.$ac_ext ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 ++$as_echo "$ac_header_preproc" >&6; } ++ ++# So? What about this header? ++case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( ++ yes:no: ) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 ++$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 ++$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ++ ;; ++ no:yes:* ) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 ++$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 ++$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;} ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 ++$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 ++$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 ++$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ++ ;; ++esac ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 ++$as_echo_n "checking for $2... 
" >&6; } ++if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : ++ $as_echo_n "(cached) " >&6 ++else ++ eval "$3=\$ac_header_compiler" ++fi ++eval ac_res=\$$3 ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 ++$as_echo "$ac_res" >&6; } ++fi ++ eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} ++ ++} # ac_fn_c_check_header_mongrel ++ ++# ac_fn_c_try_run LINENO ++# ---------------------- ++# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes ++# that executables *can* be run. ++ac_fn_c_try_run () ++{ ++ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack ++ if { { ac_try="$ac_link" ++case "(($ac_try" in ++ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; ++ *) ac_try_echo=$ac_try;; ++esac ++eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" ++$as_echo "$ac_try_echo"; } >&5 ++ (eval "$ac_link") 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' ++ { { case "(($ac_try" in ++ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; ++ *) ac_try_echo=$ac_try;; ++esac ++eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" ++$as_echo "$ac_try_echo"; } >&5 ++ (eval "$ac_try") 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 ++ test $ac_status = 0; }; }; then : ++ ac_retval=0 ++else ++ $as_echo "$as_me: program exited with status $ac_status" >&5 ++ $as_echo "$as_me: failed program was:" >&5 ++sed 's/^/| /' conftest.$ac_ext >&5 ++ ++ ac_retval=$ac_status ++fi ++ rm -rf conftest.dSYM conftest_ipa8_conftest.oo ++ eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} ++ return $ac_retval ++ ++} # ac_fn_c_try_run ++ ++# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES ++# ------------------------------------------------------- ++# Tests whether HEADER exists and can be compiled using the include files in ++# INCLUDES, setting the cache variable VAR accordingly. ++ac_fn_c_check_header_compile () ++{ ++ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 ++$as_echo_n "checking for $2... " >&6; } ++if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++$4 ++#include <$2> ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ eval "$3=yes" ++else ++ eval "$3=no" ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++fi ++eval ac_res=\$$3 ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 ++$as_echo "$ac_res" >&6; } ++ eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} ++ ++} # ac_fn_c_check_header_compile ++ ++# ac_fn_c_try_link LINENO ++# ----------------------- ++# Try to link conftest.$ac_ext, and return whether this succeeded. 
++ac_fn_c_try_link () ++{ ++ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack ++ rm -f conftest.$ac_objext conftest$ac_exeext ++ if { { ac_try="$ac_link" ++case "(($ac_try" in ++ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; ++ *) ac_try_echo=$ac_try;; ++esac ++eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" ++$as_echo "$ac_try_echo"; } >&5 ++ (eval "$ac_link") 2>conftest.err ++ ac_status=$? ++ if test -s conftest.err; then ++ grep -v '^ *+' conftest.err >conftest.er1 ++ cat conftest.er1 >&5 ++ mv -f conftest.er1 conftest.err ++ fi ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } && { ++ test -z "$ac_c_werror_flag" || ++ test ! -s conftest.err ++ } && test -s conftest$ac_exeext && { ++ test "$cross_compiling" = yes || ++ $as_test_x conftest$ac_exeext ++ }; then : ++ ac_retval=0 ++else ++ $as_echo "$as_me: failed program was:" >&5 ++sed 's/^/| /' conftest.$ac_ext >&5 ++ ++ ac_retval=1 ++fi ++ # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information ++ # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would ++ # interfere with the next link command; also delete a directory that is ++ # left behind by Apple's compiler. We do this before executing the actions. ++ rm -rf conftest.dSYM conftest_ipa8_conftest.oo ++ eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} ++ return $ac_retval ++ ++} # ac_fn_c_try_link ++ ++# ac_fn_c_check_func LINENO FUNC VAR ++# ---------------------------------- ++# Tests whether FUNC exists, setting the cache variable VAR accordingly ++ac_fn_c_check_func () ++{ ++ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 ++$as_echo_n "checking for $2... 
" >&6; } ++if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++/* Define $2 to an innocuous variant, in case declares $2. ++ For example, HP-UX 11i declares gettimeofday. */ ++#define $2 innocuous_$2 ++ ++/* System header to define __stub macros and hopefully few prototypes, ++ which can conflict with char $2 (); below. ++ Prefer to if __STDC__ is defined, since ++ exists even on freestanding compilers. */ ++ ++#ifdef __STDC__ ++# include ++#else ++# include ++#endif ++ ++#undef $2 ++ ++/* Override any GCC internal prototype to avoid an error. ++ Use char because int might match the return type of a GCC ++ builtin and then its argument prototype would still apply. */ ++#ifdef __cplusplus ++extern "C" ++#endif ++char $2 (); ++/* The GNU C library defines this for functions which it implements ++ to always fail with ENOSYS. Some functions are actually named ++ something starting with __ and the normal name is an alias. */ ++#if defined __stub_$2 || defined __stub___$2 ++choke me ++#endif ++ ++int ++main () ++{ ++return $2 (); ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ eval "$3=yes" ++else ++ eval "$3=no" ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++fi ++eval ac_res=\$$3 ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 ++$as_echo "$ac_res" >&6; } ++ eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} ++ ++} # ac_fn_c_check_func ++ ++# ac_fn_c_check_type LINENO TYPE VAR INCLUDES ++# ------------------------------------------- ++# Tests whether TYPE exists after having included INCLUDES, setting cache ++# variable VAR accordingly. 
++ac_fn_c_check_type () ++{ ++ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 ++$as_echo_n "checking for $2... " >&6; } ++if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : ++ $as_echo_n "(cached) " >&6 ++else ++ eval "$3=no" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++$4 ++int ++main () ++{ ++if (sizeof ($2)) ++ return 0; ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++$4 ++int ++main () ++{ ++if (sizeof (($2))) ++ return 0; ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ++else ++ eval "$3=yes" ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++fi ++eval ac_res=\$$3 ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 ++$as_echo "$ac_res" >&6; } ++ eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} ++ ++} # ac_fn_c_check_type ++ ++# ac_fn_c_compute_int LINENO EXPR VAR INCLUDES ++# -------------------------------------------- ++# Tries to find the compile-time value of EXPR in a program that includes ++# INCLUDES, setting VAR accordingly. Returns whether the value could be ++# computed ++ac_fn_c_compute_int () ++{ ++ as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack ++ if test "$cross_compiling" = yes; then ++ # Depending upon the size, compute the lo and hi bounds. ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++$4 ++int ++main () ++{ ++static int test_array [1 - 2 * !(($2) >= 0)]; ++test_array [0] = 0 ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_lo=0 ac_mid=0 ++ while :; do ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++$4 ++int ++main () ++{ ++static int test_array [1 - 2 * !(($2) <= $ac_mid)]; ++test_array [0] = 0 ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_hi=$ac_mid; break ++else ++ as_fn_arith $ac_mid + 1 && ac_lo=$as_val ++ if test $ac_lo -le $ac_mid; then ++ ac_lo= ac_hi= ++ break ++ fi ++ as_fn_arith 2 '*' $ac_mid + 1 && ac_mid=$as_val ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ done ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++$4 ++int ++main () ++{ ++static int test_array [1 - 2 * !(($2) < 0)]; ++test_array [0] = 0 ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_hi=-1 ac_mid=-1 ++ while :; do ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++$4 ++int ++main () ++{ ++static int test_array [1 - 2 * !(($2) >= $ac_mid)]; ++test_array [0] = 0 ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_lo=$ac_mid; break ++else ++ as_fn_arith '(' $ac_mid ')' - 1 && ac_hi=$as_val ++ if test $ac_mid -le $ac_hi; then ++ ac_lo= ac_hi= ++ break ++ fi ++ as_fn_arith 2 '*' $ac_mid && ac_mid=$as_val ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ done ++else ++ ac_lo= ac_hi= ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++# Binary search between lo and hi bounds. ++while test "x$ac_lo" != "x$ac_hi"; do ++ as_fn_arith '(' $ac_hi - $ac_lo ')' / 2 + $ac_lo && ac_mid=$as_val ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++$4 ++int ++main () ++{ ++static int test_array [1 - 2 * !(($2) <= $ac_mid)]; ++test_array [0] = 0 ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_hi=$ac_mid ++else ++ as_fn_arith '(' $ac_mid ')' + 1 && ac_lo=$as_val ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++done ++case $ac_lo in #(( ++?*) eval "$3=\$ac_lo"; ac_retval=0 ;; ++'') ac_retval=1 ;; ++esac ++ else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++$4 ++static long int longval () { return $2; } ++static unsigned long int ulongval () { return $2; } ++#include ++#include ++int ++main () ++{ ++ ++ FILE *f = fopen ("conftest.val", "w"); ++ if (! f) ++ return 1; ++ if (($2) < 0) ++ { ++ long int i = longval (); ++ if (i != ($2)) ++ return 1; ++ fprintf (f, "%ld", i); ++ } ++ else ++ { ++ unsigned long int i = ulongval (); ++ if (i != ($2)) ++ return 1; ++ fprintf (f, "%lu", i); ++ } ++ /* Do not output a trailing newline, as this causes \r\n confusion ++ on some platforms. */ ++ return ferror (f) || fclose (f) != 0; ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_run "$LINENO"; then : ++ echo >>conftest.val; read $3 &5 ++$as_echo_n "checking whether $as_decl_name is declared... " >&6; } ++if { as_var=$3; eval "test \"\${$as_var+set}\" = set"; }; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++$4 ++int ++main () ++{ ++#ifndef $as_decl_name ++#ifdef __cplusplus ++ (void) $as_decl_use; ++#else ++ (void) $as_decl_name; ++#endif ++#endif ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ eval "$3=yes" ++else ++ eval "$3=no" ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++fi ++eval ac_res=\$$3 ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 ++$as_echo "$ac_res" >&6; } ++ eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} ++ ++} # ac_fn_c_check_decl ++cat >config.log <<_ACEOF ++This file contains any messages produced by compilers while ++running configure, to aid debugging if configure makes a mistake. ++ ++It was created by package-unused $as_me version-unused, which was ++generated by GNU Autoconf 2.64. Invocation command line was ++ ++ $ $0 $@ ++ ++_ACEOF ++exec 5>>config.log ++{ ++cat <<_ASUNAME ++## --------- ## ++## Platform. ## ++## --------- ## ++ ++hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` ++uname -m = `(uname -m) 2>/dev/null || echo unknown` ++uname -r = `(uname -r) 2>/dev/null || echo unknown` ++uname -s = `(uname -s) 2>/dev/null || echo unknown` ++uname -v = `(uname -v) 2>/dev/null || echo unknown` ++ ++/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` ++/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` ++ ++/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` ++/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` ++/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` ++/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` ++/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` ++/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` ++/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` ++ ++_ASUNAME ++ ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z 
"$as_dir" && as_dir=. ++ $as_echo "PATH: $as_dir" ++ done ++IFS=$as_save_IFS ++ ++} >&5 ++ ++cat >&5 <<_ACEOF ++ ++ ++## ----------- ## ++## Core tests. ## ++## ----------- ## ++ ++_ACEOF ++ ++ ++# Keep a trace of the command line. ++# Strip out --no-create and --no-recursion so they do not pile up. ++# Strip out --silent because we don't want to record it for future runs. ++# Also quote any args containing shell meta-characters. ++# Make two passes to allow for proper duplicate-argument suppression. ++ac_configure_args= ++ac_configure_args0= ++ac_configure_args1= ++ac_must_keep_next=false ++for ac_pass in 1 2 ++do ++ for ac_arg ++ do ++ case $ac_arg in ++ -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; ++ -q | -quiet | --quiet | --quie | --qui | --qu | --q \ ++ | -silent | --silent | --silen | --sile | --sil) ++ continue ;; ++ *\'*) ++ ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; ++ esac ++ case $ac_pass in ++ 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; ++ 2) ++ as_fn_append ac_configure_args1 " '$ac_arg'" ++ if test $ac_must_keep_next = true; then ++ ac_must_keep_next=false # Got value, back to normal. ++ else ++ case $ac_arg in ++ *=* | --config-cache | -C | -disable-* | --disable-* \ ++ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ ++ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ ++ | -with-* | --with-* | -without-* | --without-* | --x) ++ case "$ac_configure_args0 " in ++ "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; ++ esac ++ ;; ++ -* ) ac_must_keep_next=true ;; ++ esac ++ fi ++ as_fn_append ac_configure_args " '$ac_arg'" ++ ;; ++ esac ++ done ++done ++{ ac_configure_args0=; unset ac_configure_args0;} ++{ ac_configure_args1=; unset ac_configure_args1;} ++ ++# When interrupted or exit'd, cleanup temporary files, and complete ++# config.log. We remove comments because anyway the quotes in there ++# would cause problems or look ugly. ++# WARNING: Use '\'' to represent an apostrophe within the trap. 
++# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. ++trap 'exit_status=$? ++ # Save into config.log some information that might help in debugging. ++ { ++ echo ++ ++ cat <<\_ASBOX ++## ---------------- ## ++## Cache variables. ## ++## ---------------- ## ++_ASBOX ++ echo ++ # The following way of writing the cache mishandles newlines in values, ++( ++ for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do ++ eval ac_val=\$$ac_var ++ case $ac_val in #( ++ *${as_nl}*) ++ case $ac_var in #( ++ *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 ++$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; ++ esac ++ case $ac_var in #( ++ _ | IFS | as_nl) ;; #( ++ BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( ++ *) { eval $ac_var=; unset $ac_var;} ;; ++ esac ;; ++ esac ++ done ++ (set) 2>&1 | ++ case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( ++ *${as_nl}ac_space=\ *) ++ sed -n \ ++ "s/'\''/'\''\\\\'\'''\''/g; ++ s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" ++ ;; #( ++ *) ++ sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ++ ;; ++ esac | ++ sort ++) ++ echo ++ ++ cat <<\_ASBOX ++## ----------------- ## ++## Output variables. ## ++## ----------------- ## ++_ASBOX ++ echo ++ for ac_var in $ac_subst_vars ++ do ++ eval ac_val=\$$ac_var ++ case $ac_val in ++ *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; ++ esac ++ $as_echo "$ac_var='\''$ac_val'\''" ++ done | sort ++ echo ++ ++ if test -n "$ac_subst_files"; then ++ cat <<\_ASBOX ++## ------------------- ## ++## File substitutions. 
## ++## ------------------- ## ++_ASBOX ++ echo ++ for ac_var in $ac_subst_files ++ do ++ eval ac_val=\$$ac_var ++ case $ac_val in ++ *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; ++ esac ++ $as_echo "$ac_var='\''$ac_val'\''" ++ done | sort ++ echo ++ fi ++ ++ if test -s confdefs.h; then ++ cat <<\_ASBOX ++## ----------- ## ++## confdefs.h. ## ++## ----------- ## ++_ASBOX ++ echo ++ cat confdefs.h ++ echo ++ fi ++ test "$ac_signal" != 0 && ++ $as_echo "$as_me: caught signal $ac_signal" ++ $as_echo "$as_me: exit $exit_status" ++ } >&5 ++ rm -f core *.core core.conftest.* && ++ rm -f -r conftest* confdefs* conf$$* $ac_clean_files && ++ exit $exit_status ++' 0 ++for ac_signal in 1 2 13 15; do ++ trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal ++done ++ac_signal=0 ++ ++# confdefs.h avoids OS command line length limits that DEFS can exceed. ++rm -f -r conftest* confdefs.h ++ ++$as_echo "/* confdefs.h */" > confdefs.h ++ ++# Predefined preprocessor variables. ++ ++cat >>confdefs.h <<_ACEOF ++#define PACKAGE_NAME "$PACKAGE_NAME" ++_ACEOF ++ ++cat >>confdefs.h <<_ACEOF ++#define PACKAGE_TARNAME "$PACKAGE_TARNAME" ++_ACEOF ++ ++cat >>confdefs.h <<_ACEOF ++#define PACKAGE_VERSION "$PACKAGE_VERSION" ++_ACEOF ++ ++cat >>confdefs.h <<_ACEOF ++#define PACKAGE_STRING "$PACKAGE_STRING" ++_ACEOF ++ ++cat >>confdefs.h <<_ACEOF ++#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" ++_ACEOF ++ ++cat >>confdefs.h <<_ACEOF ++#define PACKAGE_URL "$PACKAGE_URL" ++_ACEOF ++ ++ ++# Let the site file select an alternate cache file if it wants to. ++# Prefer an explicitly selected file to automatically selected ones. 
++ac_site_file1=NONE ++ac_site_file2=NONE ++if test -n "$CONFIG_SITE"; then ++ ac_site_file1=$CONFIG_SITE ++elif test "x$prefix" != xNONE; then ++ ac_site_file1=$prefix/share/config.site ++ ac_site_file2=$prefix/etc/config.site ++else ++ ac_site_file1=$ac_default_prefix/share/config.site ++ ac_site_file2=$ac_default_prefix/etc/config.site ++fi ++for ac_site_file in "$ac_site_file1" "$ac_site_file2" ++do ++ test "x$ac_site_file" = xNONE && continue ++ if test -r "$ac_site_file"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 ++$as_echo "$as_me: loading site script $ac_site_file" >&6;} ++ sed 's/^/| /' "$ac_site_file" >&5 ++ . "$ac_site_file" ++ fi ++done ++ ++if test -r "$cache_file"; then ++ # Some versions of bash will fail to source /dev/null (special ++ # files actually), so we avoid doing that. ++ if test -f "$cache_file"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 ++$as_echo "$as_me: loading cache $cache_file" >&6;} ++ case $cache_file in ++ [\\/]* | ?:[\\/]* ) . "$cache_file";; ++ *) . "./$cache_file";; ++ esac ++ fi ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 ++$as_echo "$as_me: creating cache $cache_file" >&6;} ++ >$cache_file ++fi ++ ++# Check that the precious variables saved in the cache have kept the same ++# value. 
++ac_cache_corrupted=false ++for ac_var in $ac_precious_vars; do ++ eval ac_old_set=\$ac_cv_env_${ac_var}_set ++ eval ac_new_set=\$ac_env_${ac_var}_set ++ eval ac_old_val=\$ac_cv_env_${ac_var}_value ++ eval ac_new_val=\$ac_env_${ac_var}_value ++ case $ac_old_set,$ac_new_set in ++ set,) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 ++$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} ++ ac_cache_corrupted=: ;; ++ ,set) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 ++$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} ++ ac_cache_corrupted=: ;; ++ ,);; ++ *) ++ if test "x$ac_old_val" != "x$ac_new_val"; then ++ # differences in whitespace do not lead to failure. ++ ac_old_val_w=`echo x $ac_old_val` ++ ac_new_val_w=`echo x $ac_new_val` ++ if test "$ac_old_val_w" != "$ac_new_val_w"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 ++$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} ++ ac_cache_corrupted=: ++ else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 ++$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} ++ eval $ac_var=\$ac_old_val ++ fi ++ { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 ++$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} ++ { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 ++$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} ++ fi;; ++ esac ++ # Pass precious variables to config.status. 
++ if test "$ac_new_set" = set; then ++ case $ac_new_val in ++ *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; ++ *) ac_arg=$ac_var=$ac_new_val ;; ++ esac ++ case " $ac_configure_args " in ++ *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. ++ *) as_fn_append ac_configure_args " '$ac_arg'" ;; ++ esac ++ fi ++done ++if $ac_cache_corrupted; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++ { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 ++$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} ++ as_fn_error "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 ++fi ++## -------------------- ## ++## Main body of script. ## ++## -------------------- ## ++ ++ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++ ++ ++ ++ ++ ++ ++ac_config_headers="$ac_config_headers config.h" ++ ++ ++if test -n "${with_target_subdir}"; then ++ # Default to --enable-multilib ++# Check whether --enable-multilib was given. ++if test "${enable_multilib+set}" = set; then : ++ enableval=$enable_multilib; case "$enableval" in ++ yes) multilib=yes ;; ++ no) multilib=no ;; ++ *) as_fn_error "bad value $enableval for multilib option" "$LINENO" 5 ;; ++ esac ++else ++ multilib=yes ++fi ++ ++ ++# We may get other options which we leave undocumented: ++# --with-target-subdir, --with-multisrctop, --with-multisubdir ++# See config-ml.in if you want the gory details. ++ ++if test "$srcdir" = "."; then ++ if test "$with_target_subdir" != "."; then ++ multi_basedir="$srcdir/$with_multisrctop../.." ++ else ++ multi_basedir="$srcdir/$with_multisrctop.." ++ fi ++else ++ multi_basedir="$srcdir/.." 
++fi ++ ++ ++# Even if the default multilib is not a cross compilation, ++# it may be that some of the other multilibs are. ++if test $cross_compiling = no && test $multilib = yes \ ++ && test "x${with_multisubdir}" != x ; then ++ cross_compiling=maybe ++fi ++ ++ac_config_commands="$ac_config_commands default-1" ++ ++fi ++ ++ac_aux_dir= ++for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do ++ for ac_t in install-sh install.sh shtool; do ++ if test -f "$ac_dir/$ac_t"; then ++ ac_aux_dir=$ac_dir ++ ac_install_sh="$ac_aux_dir/$ac_t -c" ++ break 2 ++ fi ++ done ++done ++if test -z "$ac_aux_dir"; then ++ as_fn_error "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5 ++fi ++ ++# These three variables are undocumented and unsupported, ++# and are intended to be withdrawn in a future Autoconf release. ++# They can cause serious problems if a builder's source tree is in a directory ++# whose full name contains unusual characters. ++ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. ++ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. ++ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. ++ ++ ++# Make sure we can run config.sub. ++$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || ++ as_fn_error "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 ++$as_echo_n "checking build system type... 
" >&6; } ++if test "${ac_cv_build+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_build_alias=$build_alias ++test "x$ac_build_alias" = x && ++ ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` ++test "x$ac_build_alias" = x && ++ as_fn_error "cannot guess build type; you must specify one" "$LINENO" 5 ++ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || ++ as_fn_error "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 ++$as_echo "$ac_cv_build" >&6; } ++case $ac_cv_build in ++*-*-*) ;; ++*) as_fn_error "invalid value of canonical build" "$LINENO" 5;; ++esac ++build=$ac_cv_build ++ac_save_IFS=$IFS; IFS='-' ++set x $ac_cv_build ++shift ++build_cpu=$1 ++build_vendor=$2 ++shift; shift ++# Remember, the first character of IFS is used to create $*, ++# except with old shells: ++build_os=$* ++IFS=$ac_save_IFS ++case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 ++$as_echo_n "checking host system type... 
" >&6; } ++if test "${ac_cv_host+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test "x$host_alias" = x; then ++ ac_cv_host=$ac_cv_build ++else ++ ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || ++ as_fn_error "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 ++fi ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 ++$as_echo "$ac_cv_host" >&6; } ++case $ac_cv_host in ++*-*-*) ;; ++*) as_fn_error "invalid value of canonical host" "$LINENO" 5;; ++esac ++host=$ac_cv_host ++ac_save_IFS=$IFS; IFS='-' ++set x $ac_cv_host ++shift ++host_cpu=$1 ++host_vendor=$2 ++shift; shift ++# Remember, the first character of IFS is used to create $*, ++# except with old shells: ++host_os=$* ++IFS=$ac_save_IFS ++case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking target system type" >&5 ++$as_echo_n "checking target system type... " >&6; } ++if test "${ac_cv_target+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test "x$target_alias" = x; then ++ ac_cv_target=$ac_cv_host ++else ++ ac_cv_target=`$SHELL "$ac_aux_dir/config.sub" $target_alias` || ++ as_fn_error "$SHELL $ac_aux_dir/config.sub $target_alias failed" "$LINENO" 5 ++fi ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_target" >&5 ++$as_echo "$ac_cv_target" >&6; } ++case $ac_cv_target in ++*-*-*) ;; ++*) as_fn_error "invalid value of canonical target" "$LINENO" 5;; ++esac ++target=$ac_cv_target ++ac_save_IFS=$IFS; IFS='-' ++set x $ac_cv_target ++shift ++target_cpu=$1 ++target_vendor=$2 ++shift; shift ++# Remember, the first character of IFS is used to create $*, ++# except with old shells: ++target_os=$* ++IFS=$ac_save_IFS ++case $target_os in *\ *) target_os=`echo "$target_os" | sed 's/ /-/g'`;; esac ++ ++ ++# The aliases save the names the user supplied, while $host etc. ++# will get canonicalized. 
++test -n "$target_alias" && ++ test "$program_prefix$program_suffix$program_transform_name" = \ ++ NONENONEs,x,x, && ++ program_prefix=${target_alias}- ++ ++target_alias=${target_alias-$host_alias} ++ ++ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. ++set dummy ${ac_tool_prefix}gcc; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$CC"; then ++ ac_cv_prog_CC="$CC" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_CC="${ac_tool_prefix}gcc" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++CC=$ac_cv_prog_CC ++if test -n "$CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 ++$as_echo "$CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_CC"; then ++ ac_ct_CC=$CC ++ # Extract the first word of "gcc", so it can be a program name with args. ++set dummy gcc; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... 
" >&6; } ++if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_CC"; then ++ ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_CC="gcc" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_CC=$ac_cv_prog_ac_ct_CC ++if test -n "$ac_ct_CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 ++$as_echo "$ac_ct_CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_CC" = x; then ++ CC="" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ CC=$ac_ct_CC ++ fi ++else ++ CC="$ac_cv_prog_CC" ++fi ++ ++if test -z "$CC"; then ++ if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. ++set dummy ${ac_tool_prefix}cc; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$CC"; then ++ ac_cv_prog_CC="$CC" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_CC="${ac_tool_prefix}cc" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++CC=$ac_cv_prog_CC ++if test -n "$CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 ++$as_echo "$CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ fi ++fi ++if test -z "$CC"; then ++ # Extract the first word of "cc", so it can be a program name with args. ++set dummy cc; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$CC"; then ++ ac_cv_prog_CC="$CC" # Let the user override the test. ++else ++ ac_prog_rejected=no ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ++ ac_prog_rejected=yes ++ continue ++ fi ++ ac_cv_prog_CC="cc" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++if test $ac_prog_rejected = yes; then ++ # We found a bogon in the path, so make sure we never use it. ++ set dummy $ac_cv_prog_CC ++ shift ++ if test $# != 0; then ++ # We chose a different compiler from the bogus one. ++ # However, it has the same basename, so the bogon will be chosen ++ # first if we set CC to just the basename; use the full file name. 
++ shift ++ ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" ++ fi ++fi ++fi ++fi ++CC=$ac_cv_prog_CC ++if test -n "$CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 ++$as_echo "$CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$CC"; then ++ if test -n "$ac_tool_prefix"; then ++ for ac_prog in cl.exe ++ do ++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. ++set dummy $ac_tool_prefix$ac_prog; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$CC"; then ++ ac_cv_prog_CC="$CC" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_CC="$ac_tool_prefix$ac_prog" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++CC=$ac_cv_prog_CC ++if test -n "$CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 ++$as_echo "$CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ test -n "$CC" && break ++ done ++fi ++if test -z "$CC"; then ++ ac_ct_CC=$CC ++ for ac_prog in cl.exe ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. ++set dummy $ac_prog; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... 
" >&6; } ++if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_CC"; then ++ ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_CC="$ac_prog" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_CC=$ac_cv_prog_ac_ct_CC ++if test -n "$ac_ct_CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 ++$as_echo "$ac_ct_CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ test -n "$ac_ct_CC" && break ++done ++ ++ if test "x$ac_ct_CC" = x; then ++ CC="" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ CC=$ac_ct_CC ++ fi ++fi ++ ++fi ++ ++ ++test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++as_fn_error "no acceptable C compiler found in \$PATH ++See \`config.log' for more details." "$LINENO" 5; } ++ ++# Provide some information about the compiler. 
++$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 ++set X $ac_compile ++ac_compiler=$2 ++for ac_option in --version -v -V -qversion; do ++ { { ac_try="$ac_compiler $ac_option >&5" ++case "(($ac_try" in ++ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; ++ *) ac_try_echo=$ac_try;; ++esac ++eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" ++$as_echo "$ac_try_echo"; } >&5 ++ (eval "$ac_compiler $ac_option >&5") 2>conftest.err ++ ac_status=$? ++ if test -s conftest.err; then ++ sed '10a\ ++... rest of stderr output deleted ... ++ 10q' conftest.err >conftest.er1 ++ cat conftest.er1 >&5 ++ rm -f conftest.er1 conftest.err ++ fi ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++done ++ ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++ac_clean_files_save=$ac_clean_files ++ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out conftest.out" ++# Try to create an executable without -o first, disregard a.out. ++# It will help us diagnose broken compilers, and finding out an intuition ++# of exeext. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 ++$as_echo_n "checking for C compiler default output file name... 
" >&6; } ++ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` ++ ++# The possible output files: ++ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" ++ ++ac_rmfiles= ++for ac_file in $ac_files ++do ++ case $ac_file in ++ *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; ++ * ) ac_rmfiles="$ac_rmfiles $ac_file";; ++ esac ++done ++rm -f $ac_rmfiles ++ ++if { { ac_try="$ac_link_default" ++case "(($ac_try" in ++ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; ++ *) ac_try_echo=$ac_try;; ++esac ++eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" ++$as_echo "$ac_try_echo"; } >&5 ++ (eval "$ac_link_default") 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then : ++ # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. ++# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' ++# in a Makefile. We should not override ac_cv_exeext if it was cached, ++# so that the user can short-circuit this test for compilers unknown to ++# Autoconf. ++for ac_file in $ac_files '' ++do ++ test -f "$ac_file" || continue ++ case $ac_file in ++ *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ++ ;; ++ [ab].out ) ++ # We found the default executable, but exeext='' is most ++ # certainly right. ++ break;; ++ *.* ) ++ if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; ++ then :; else ++ ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` ++ fi ++ # We set ac_cv_exeext here because the later test for it is not ++ # safe: cross compilers may not add the suffix if given an `-o' ++ # argument, so we may need to know it at that point already. ++ # Even if this section looks crufty: it has the advantage of ++ # actually working. 
++ break;; ++ * ) ++ break;; ++ esac ++done ++test "$ac_cv_exeext" = no && ac_cv_exeext= ++ ++else ++ ac_file='' ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 ++$as_echo "$ac_file" >&6; } ++if test -z "$ac_file"; then : ++ $as_echo "$as_me: failed program was:" >&5 ++sed 's/^/| /' conftest.$ac_ext >&5 ++ ++{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++{ as_fn_set_status 77 ++as_fn_error "C compiler cannot create executables ++See \`config.log' for more details." "$LINENO" 5; }; } ++fi ++ac_exeext=$ac_cv_exeext ++ ++# Check that the compiler produces executables we can run. If not, either ++# the compiler is broken, or we cross compile. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 ++$as_echo_n "checking whether the C compiler works... " >&6; } ++# If not cross compiling, check that we can run a simple program. ++if test "$cross_compiling" != yes; then ++ if { ac_try='./$ac_file' ++ { { case "(($ac_try" in ++ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; ++ *) ac_try_echo=$ac_try;; ++esac ++eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" ++$as_echo "$ac_try_echo"; } >&5 ++ (eval "$ac_try") 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; }; then ++ cross_compiling=no ++ else ++ if test "$cross_compiling" = maybe; then ++ cross_compiling=yes ++ else ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++as_fn_error "cannot run C compiled programs. ++If you meant to cross compile, use \`--host'. ++See \`config.log' for more details." 
"$LINENO" 5; } ++ fi ++ fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++ ++rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out conftest.out ++ac_clean_files=$ac_clean_files_save ++# Check that the compiler produces executables we can run. If not, either ++# the compiler is broken, or we cross compile. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 ++$as_echo_n "checking whether we are cross compiling... " >&6; } ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 ++$as_echo "$cross_compiling" >&6; } ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 ++$as_echo_n "checking for suffix of executables... " >&6; } ++if { { ac_try="$ac_link" ++case "(($ac_try" in ++ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; ++ *) ac_try_echo=$ac_try;; ++esac ++eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" ++$as_echo "$ac_try_echo"; } >&5 ++ (eval "$ac_link") 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then : ++ # If both `conftest.exe' and `conftest' are `present' (well, observable) ++# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will ++# work properly (i.e., refer to `conftest.exe'), while it won't with ++# `rm'. ++for ac_file in conftest.exe conftest conftest.*; do ++ test -f "$ac_file" || continue ++ case $ac_file in ++ *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; ++ *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` ++ break;; ++ * ) break;; ++ esac ++done ++else ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++as_fn_error "cannot compute suffix of executables: cannot compile and link ++See \`config.log' for more details." 
"$LINENO" 5; } ++fi ++rm -f conftest$ac_cv_exeext ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 ++$as_echo "$ac_cv_exeext" >&6; } ++ ++rm -f conftest.$ac_ext ++EXEEXT=$ac_cv_exeext ++ac_exeext=$EXEEXT ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 ++$as_echo_n "checking for suffix of object files... " >&6; } ++if test "${ac_cv_objext+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++rm -f conftest.o conftest.obj ++if { { ac_try="$ac_compile" ++case "(($ac_try" in ++ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; ++ *) ac_try_echo=$ac_try;; ++esac ++eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" ++$as_echo "$ac_try_echo"; } >&5 ++ (eval "$ac_compile") 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then : ++ for ac_file in conftest.o conftest.obj conftest.*; do ++ test -f "$ac_file" || continue; ++ case $ac_file in ++ *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; ++ *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` ++ break;; ++ esac ++done ++else ++ $as_echo "$as_me: failed program was:" >&5 ++sed 's/^/| /' conftest.$ac_ext >&5 ++ ++{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++as_fn_error "cannot compute suffix of object files: cannot compile ++See \`config.log' for more details." "$LINENO" 5; } ++fi ++rm -f conftest.$ac_cv_objext conftest.$ac_ext ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 ++$as_echo "$ac_cv_objext" >&6; } ++OBJEXT=$ac_cv_objext ++ac_objext=$OBJEXT ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 ++$as_echo_n "checking whether we are using the GNU C compiler... 
" >&6; } ++if test "${ac_cv_c_compiler_gnu+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++#ifndef __GNUC__ ++ choke me ++#endif ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_compiler_gnu=yes ++else ++ ac_compiler_gnu=no ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ac_cv_c_compiler_gnu=$ac_compiler_gnu ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 ++$as_echo "$ac_cv_c_compiler_gnu" >&6; } ++if test $ac_compiler_gnu = yes; then ++ GCC=yes ++else ++ GCC= ++fi ++ac_test_CFLAGS=${CFLAGS+set} ++ac_save_CFLAGS=$CFLAGS ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 ++$as_echo_n "checking whether $CC accepts -g... " >&6; } ++if test "${ac_cv_prog_cc_g+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_save_c_werror_flag=$ac_c_werror_flag ++ ac_c_werror_flag=yes ++ ac_cv_prog_cc_g=no ++ CFLAGS="-g" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_cv_prog_cc_g=yes ++else ++ CFLAGS="" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ++else ++ ac_c_werror_flag=$ac_save_c_werror_flag ++ CFLAGS="-g" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_cv_prog_cc_g=yes ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ ac_c_werror_flag=$ac_save_c_werror_flag ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 ++$as_echo "$ac_cv_prog_cc_g" >&6; } ++if test "$ac_test_CFLAGS" = set; then ++ CFLAGS=$ac_save_CFLAGS ++elif test $ac_cv_prog_cc_g = yes; then ++ if test "$GCC" = yes; then ++ CFLAGS="-g -O2" ++ else ++ CFLAGS="-g" ++ fi ++else ++ if test "$GCC" = yes; then ++ CFLAGS="-O2" ++ else ++ CFLAGS= ++ fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 ++$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } ++if test "${ac_cv_prog_cc_c89+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_cv_prog_cc_c89=no ++ac_save_CC=$CC ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++#include ++#include ++#include ++#include ++/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ ++struct buf { int x; }; ++FILE * (*rcsopen) (struct buf *, struct stat *, int); ++static char *e (p, i) ++ char **p; ++ int i; ++{ ++ return p[i]; ++} ++static char *f (char * (*g) (char **, int), char **p, ...) ++{ ++ char *s; ++ va_list v; ++ va_start (v,p); ++ s = g (p, va_arg (v,int)); ++ va_end (v); ++ return s; ++} ++ ++/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has ++ function prototypes and stuff, but not '\xHH' hex character constants. ++ These don't provoke an error unfortunately, instead are silently treated ++ as 'x'. The following induces an error, until -std is added to get ++ proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an ++ array size at least. 
It's necessary to write '\x00'==0 to get something ++ that's true only with -std. */ ++int osf4_cc_array ['\x00' == 0 ? 1 : -1]; ++ ++/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters ++ inside strings and character constants. */ ++#define FOO(x) 'x' ++int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1]; ++ ++int test (int i, double x); ++struct s1 {int (*f) (int a);}; ++struct s2 {int (*f) (double a);}; ++int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); ++int argc; ++char **argv; ++int ++main () ++{ ++return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ++ ; ++ return 0; ++} ++_ACEOF ++for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ ++ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" ++do ++ CC="$ac_save_CC $ac_arg" ++ if ac_fn_c_try_compile "$LINENO"; then : ++ ac_cv_prog_cc_c89=$ac_arg ++fi ++rm -f core conftest.err conftest.$ac_objext ++ test "x$ac_cv_prog_cc_c89" != "xno" && break ++done ++rm -f conftest.$ac_ext ++CC=$ac_save_CC ++ ++fi ++# AC_CACHE_VAL ++case "x$ac_cv_prog_cc_c89" in ++ x) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 ++$as_echo "none needed" >&6; } ;; ++ xno) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 ++$as_echo "unsupported" >&6; } ;; ++ *) ++ CC="$CC $ac_cv_prog_cc_c89" ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 ++$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; ++esac ++if test "x$ac_cv_prog_cc_c89" != xno; then : ++ ++fi ++ ++ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++ ++ ++ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++{ $as_echo 
"$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 ++$as_echo_n "checking how to run the C preprocessor... " >&6; } ++# On Suns, sometimes $CPP names a directory. ++if test -n "$CPP" && test -d "$CPP"; then ++ CPP= ++fi ++if test -z "$CPP"; then ++ if test "${ac_cv_prog_CPP+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ # Double quotes because CPP needs to be expanded ++ for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" ++ do ++ ac_preproc_ok=false ++for ac_c_preproc_warn_flag in '' yes ++do ++ # Use a header file that comes with gcc, so configuring glibc ++ # with a fresh cross-compiler works. ++ # Prefer to if __STDC__ is defined, since ++ # exists even on freestanding compilers. ++ # On the NeXT, cc -E runs the code through the compiler's parser, ++ # not just through cpp. "Syntax error" is here to catch this case. ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++#ifdef __STDC__ ++# include ++#else ++# include ++#endif ++ Syntax error ++_ACEOF ++if ac_fn_c_try_cpp "$LINENO"; then : ++ ++else ++ # Broken: fails on valid input. ++continue ++fi ++rm -f conftest.err conftest.$ac_ext ++ ++ # OK, works on sane cases. Now check whether nonexistent headers ++ # can be detected and how. ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++#include ++_ACEOF ++if ac_fn_c_try_cpp "$LINENO"; then : ++ # Broken: success on invalid input. ++continue ++else ++ # Passes both tests. ++ac_preproc_ok=: ++break ++fi ++rm -f conftest.err conftest.$ac_ext ++ ++done ++# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. 
++rm -f conftest.err conftest.$ac_ext ++if $ac_preproc_ok; then : ++ break ++fi ++ ++ done ++ ac_cv_prog_CPP=$CPP ++ ++fi ++ CPP=$ac_cv_prog_CPP ++else ++ ac_cv_prog_CPP=$CPP ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 ++$as_echo "$CPP" >&6; } ++ac_preproc_ok=false ++for ac_c_preproc_warn_flag in '' yes ++do ++ # Use a header file that comes with gcc, so configuring glibc ++ # with a fresh cross-compiler works. ++ # Prefer to if __STDC__ is defined, since ++ # exists even on freestanding compilers. ++ # On the NeXT, cc -E runs the code through the compiler's parser, ++ # not just through cpp. "Syntax error" is here to catch this case. ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++#ifdef __STDC__ ++# include ++#else ++# include ++#endif ++ Syntax error ++_ACEOF ++if ac_fn_c_try_cpp "$LINENO"; then : ++ ++else ++ # Broken: fails on valid input. ++continue ++fi ++rm -f conftest.err conftest.$ac_ext ++ ++ # OK, works on sane cases. Now check whether nonexistent headers ++ # can be detected and how. ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++#include ++_ACEOF ++if ac_fn_c_try_cpp "$LINENO"; then : ++ # Broken: success on invalid input. ++continue ++else ++ # Passes both tests. ++ac_preproc_ok=: ++break ++fi ++rm -f conftest.err conftest.$ac_ext ++ ++done ++# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. ++rm -f conftest.err conftest.$ac_ext ++if $ac_preproc_ok; then : ++ ++else ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++as_fn_error "C preprocessor \"$CPP\" fails sanity check ++See \`config.log' for more details." 
"$LINENO" 5; } ++fi ++ ++ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 ++$as_echo_n "checking for grep that handles long lines and -e... " >&6; } ++if test "${ac_cv_path_GREP+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -z "$GREP"; then ++ ac_path_GREP_found=false ++ # Loop through the user's path and test for each of PROGNAME-LIST ++ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_prog in grep ggrep; do ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" ++ { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue ++# Check for GNU ac_path_GREP and select it if it is found. 
++ # Check for GNU $ac_path_GREP ++case `"$ac_path_GREP" --version 2>&1` in ++*GNU*) ++ ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; ++*) ++ ac_count=0 ++ $as_echo_n 0123456789 >"conftest.in" ++ while : ++ do ++ cat "conftest.in" "conftest.in" >"conftest.tmp" ++ mv "conftest.tmp" "conftest.in" ++ cp "conftest.in" "conftest.nl" ++ $as_echo 'GREP' >> "conftest.nl" ++ "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break ++ diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break ++ as_fn_arith $ac_count + 1 && ac_count=$as_val ++ if test $ac_count -gt ${ac_path_GREP_max-0}; then ++ # Best one so far, save it but keep looking for a better one ++ ac_cv_path_GREP="$ac_path_GREP" ++ ac_path_GREP_max=$ac_count ++ fi ++ # 10*(2^10) chars as input seems more than enough ++ test $ac_count -gt 10 && break ++ done ++ rm -f conftest.in conftest.tmp conftest.nl conftest.out;; ++esac ++ ++ $ac_path_GREP_found && break 3 ++ done ++ done ++ done ++IFS=$as_save_IFS ++ if test -z "$ac_cv_path_GREP"; then ++ as_fn_error "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 ++ fi ++else ++ ac_cv_path_GREP=$GREP ++fi ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 ++$as_echo "$ac_cv_path_GREP" >&6; } ++ GREP="$ac_cv_path_GREP" ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 ++$as_echo_n "checking for egrep... " >&6; } ++if test "${ac_cv_path_EGREP+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 ++ then ac_cv_path_EGREP="$GREP -E" ++ else ++ if test -z "$EGREP"; then ++ ac_path_EGREP_found=false ++ # Loop through the user's path and test for each of PROGNAME-LIST ++ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_prog in egrep; do ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" ++ { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue ++# Check for GNU ac_path_EGREP and select it if it is found. ++ # Check for GNU $ac_path_EGREP ++case `"$ac_path_EGREP" --version 2>&1` in ++*GNU*) ++ ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; ++*) ++ ac_count=0 ++ $as_echo_n 0123456789 >"conftest.in" ++ while : ++ do ++ cat "conftest.in" "conftest.in" >"conftest.tmp" ++ mv "conftest.tmp" "conftest.in" ++ cp "conftest.in" "conftest.nl" ++ $as_echo 'EGREP' >> "conftest.nl" ++ "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break ++ diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break ++ as_fn_arith $ac_count + 1 && ac_count=$as_val ++ if test $ac_count -gt ${ac_path_EGREP_max-0}; then ++ # Best one so far, save it but keep looking for a better one ++ ac_cv_path_EGREP="$ac_path_EGREP" ++ ac_path_EGREP_max=$ac_count ++ fi ++ # 10*(2^10) chars as input seems more than enough ++ test $ac_count -gt 10 && break ++ done ++ rm -f conftest.in conftest.tmp conftest.nl conftest.out;; ++esac ++ ++ $ac_path_EGREP_found && break 3 ++ done ++ done ++ done ++IFS=$as_save_IFS ++ if test -z "$ac_cv_path_EGREP"; then ++ as_fn_error "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 ++ fi ++else ++ ac_cv_path_EGREP=$EGREP ++fi ++ ++ fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 ++$as_echo "$ac_cv_path_EGREP" >&6; } ++ EGREP="$ac_cv_path_EGREP" ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 ++$as_echo_n "checking for ANSI C header files... " >&6; } ++if test "${ac_cv_header_stdc+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++#include ++#include ++#include ++#include ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_cv_header_stdc=yes ++else ++ ac_cv_header_stdc=no ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ ++if test $ac_cv_header_stdc = yes; then ++ # SunOS 4.x string.h does not declare mem*, contrary to ANSI. ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++#include ++ ++_ACEOF ++if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | ++ $EGREP "memchr" >/dev/null 2>&1; then : ++ ++else ++ ac_cv_header_stdc=no ++fi ++rm -f conftest* ++ ++fi ++ ++if test $ac_cv_header_stdc = yes; then ++ # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++#include ++ ++_ACEOF ++if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | ++ $EGREP "free" >/dev/null 2>&1; then : ++ ++else ++ ac_cv_header_stdc=no ++fi ++rm -f conftest* ++ ++fi ++ ++if test $ac_cv_header_stdc = yes; then ++ # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. ++ if test "$cross_compiling" = yes; then : ++ : ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++#include ++#include ++#if ((' ' & 0x0FF) == 0x020) ++# define ISLOWER(c) ('a' <= (c) && (c) <= 'z') ++# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) ++#else ++# define ISLOWER(c) \ ++ (('a' <= (c) && (c) <= 'i') \ ++ || ('j' <= (c) && (c) <= 'r') \ ++ || ('s' <= (c) && (c) <= 'z')) ++# define TOUPPER(c) (ISLOWER(c) ? 
((c) | 0x40) : (c)) ++#endif ++ ++#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) ++int ++main () ++{ ++ int i; ++ for (i = 0; i < 256; i++) ++ if (XOR (islower (i), ISLOWER (i)) ++ || toupper (i) != TOUPPER (i)) ++ return 2; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_run "$LINENO"; then : ++ ++else ++ ac_cv_header_stdc=no ++fi ++rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ ++ conftest.$ac_objext conftest.beam conftest.$ac_ext ++fi ++ ++fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 ++$as_echo "$ac_cv_header_stdc" >&6; } ++if test $ac_cv_header_stdc = yes; then ++ ++$as_echo "#define STDC_HEADERS 1" >>confdefs.h ++ ++fi ++ ++# On IRIX 5.3, sys/types and inttypes.h are conflicting. ++for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ ++ inttypes.h stdint.h unistd.h ++do : ++ as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ++ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default ++" ++eval as_val=\$$as_ac_Header ++ if test "x$as_val" = x""yes; then : ++ cat >>confdefs.h <<_ACEOF ++#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 ++_ACEOF ++ ++fi ++ ++done ++ ++ ++ ++ ac_fn_c_check_header_mongrel "$LINENO" "minix/config.h" "ac_cv_header_minix_config_h" "$ac_includes_default" ++if test "x$ac_cv_header_minix_config_h" = x""yes; then : ++ MINIX=yes ++else ++ MINIX= ++fi ++ ++ ++ if test "$MINIX" = yes; then ++ ++$as_echo "#define _POSIX_SOURCE 1" >>confdefs.h ++ ++ ++$as_echo "#define _POSIX_1_SOURCE 2" >>confdefs.h ++ ++ ++$as_echo "#define _MINIX 1" >>confdefs.h ++ ++ fi ++ ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether it is safe to define __EXTENSIONS__" >&5 ++$as_echo_n "checking whether it is safe to define __EXTENSIONS__... " >&6; } ++if test "${ac_cv_safe_to_define___extensions__+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++ ++# define __EXTENSIONS__ 1 ++ $ac_includes_default ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_cv_safe_to_define___extensions__=yes ++else ++ ac_cv_safe_to_define___extensions__=no ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_safe_to_define___extensions__" >&5 ++$as_echo "$ac_cv_safe_to_define___extensions__" >&6; } ++ test $ac_cv_safe_to_define___extensions__ = yes && ++ $as_echo "#define __EXTENSIONS__ 1" >>confdefs.h ++ ++ $as_echo "#define _ALL_SOURCE 1" >>confdefs.h ++ ++ $as_echo "#define _GNU_SOURCE 1" >>confdefs.h ++ ++ $as_echo "#define _POSIX_PTHREAD_SEMANTICS 1" >>confdefs.h ++ ++ $as_echo "#define _TANDEM_SOURCE 1" >>confdefs.h ++ ++ ++ ++libtool_VERSION=1:0:0 ++ ++ ++# 1.11.1: Require that version of automake. ++# foreign: Don't require README, INSTALL, NEWS, etc. ++# no-define: Don't define PACKAGE and VERSION. ++# no-dependencies: Don't generate automatic dependencies. ++# (because it breaks when using bootstrap-lean, since some of the ++# headers are gone at "make install" time). ++# -Wall: Issue all automake warnings. ++# -Wno-portability: Don't warn about constructs supported by GNU make. ++# (because GCC requires GNU make anyhow). ++am__api_version='1.11' ++ ++# Find a good install program. We prefer a C program (faster), ++# so one script is as good as another. 
But avoid the broken or ++# incompatible versions: ++# SysV /etc/install, /usr/sbin/install ++# SunOS /usr/etc/install ++# IRIX /sbin/install ++# AIX /bin/install ++# AmigaOS /C/install, which installs bootblocks on floppy discs ++# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag ++# AFS /usr/afsws/bin/install, which mishandles nonexistent args ++# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" ++# OS/2's system install, which has a completely different semantic ++# ./install, which can be erroneously created by make from ./install.sh. ++# Reject install programs that cannot install multiple files. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 ++$as_echo_n "checking for a BSD-compatible install... " >&6; } ++if test -z "$INSTALL"; then ++if test "${ac_cv_path_install+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ # Account for people who put trailing slashes in PATH elements. ++case $as_dir/ in #(( ++ ./ | .// | /[cC]/* | \ ++ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ++ ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ ++ /usr/ucb/* ) ;; ++ *) ++ # OSF1 and SCO ODT 3.0 have their own names for install. ++ # Don't use installbsd from OSF since it installs stuff as root ++ # by default. ++ for ac_prog in ginstall scoinst install; do ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then ++ if test $ac_prog = install && ++ grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then ++ # AIX install. It has an incompatible calling convention. ++ : ++ elif test $ac_prog = install && ++ grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then ++ # program-specific install script used by HP pwplus--don't use. 
++ : ++ else ++ rm -rf conftest.one conftest.two conftest.dir ++ echo one > conftest.one ++ echo two > conftest.two ++ mkdir conftest.dir ++ if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && ++ test -s conftest.one && test -s conftest.two && ++ test -s conftest.dir/conftest.one && ++ test -s conftest.dir/conftest.two ++ then ++ ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" ++ break 3 ++ fi ++ fi ++ fi ++ done ++ done ++ ;; ++esac ++ ++ done ++IFS=$as_save_IFS ++ ++rm -rf conftest.one conftest.two conftest.dir ++ ++fi ++ if test "${ac_cv_path_install+set}" = set; then ++ INSTALL=$ac_cv_path_install ++ else ++ # As a last resort, use the slow shell script. Don't cache a ++ # value for INSTALL within a source directory, because that will ++ # break other packages using the cache if that directory is ++ # removed, or if the value is a relative name. ++ INSTALL=$ac_install_sh ++ fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 ++$as_echo "$INSTALL" >&6; } ++ ++# Use test -z because SunOS4 sh mishandles braces in ${var-val}. ++# It thinks the first close brace ends the variable substitution. ++test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' ++ ++test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' ++ ++test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 ++$as_echo_n "checking whether build environment is sane... " >&6; } ++# Just in case ++sleep 1 ++echo timestamp > conftest.file ++# Reject unsafe characters in $srcdir or the absolute working directory ++# name. Accept space and tab only in the latter. 
++am_lf=' ++' ++case `pwd` in ++ *[\\\"\#\$\&\'\`$am_lf]*) ++ as_fn_error "unsafe absolute working directory name" "$LINENO" 5;; ++esac ++case $srcdir in ++ *[\\\"\#\$\&\'\`$am_lf\ \ ]*) ++ as_fn_error "unsafe srcdir value: \`$srcdir'" "$LINENO" 5;; ++esac ++ ++# Do `set' in a subshell so we don't clobber the current shell's ++# arguments. Must try -L first in case configure is actually a ++# symlink; some systems play weird games with the mod time of symlinks ++# (eg FreeBSD returns the mod time of the symlink's containing ++# directory). ++if ( ++ set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` ++ if test "$*" = "X"; then ++ # -L didn't work. ++ set X `ls -t "$srcdir/configure" conftest.file` ++ fi ++ rm -f conftest.file ++ if test "$*" != "X $srcdir/configure conftest.file" \ ++ && test "$*" != "X conftest.file $srcdir/configure"; then ++ ++ # If neither matched, then we have a broken ls. This can happen ++ # if, for instance, CONFIG_SHELL is bash and it inherits a ++ # broken ls alias from the environment. This has actually ++ # happened. Such a system could not be considered "sane". ++ as_fn_error "ls -t appears to fail. Make sure there is not a broken ++alias in your environment" "$LINENO" 5 ++ fi ++ ++ test "$2" = conftest.file ++ ) ++then ++ # Ok. ++ : ++else ++ as_fn_error "newly created file is older than distributed files! ++Check your system clock" "$LINENO" 5 ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++test "$program_prefix" != NONE && ++ program_transform_name="s&^&$program_prefix&;$program_transform_name" ++# Use a double $ so make ignores it. ++test "$program_suffix" != NONE && ++ program_transform_name="s&\$&$program_suffix&;$program_transform_name" ++# Double any \ or $. ++# By default was `s,x,x', remove it if useless. 
++ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' ++program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` ++ ++# expand $ac_aux_dir to an absolute path ++am_aux_dir=`cd $ac_aux_dir && pwd` ++ ++if test x"${MISSING+set}" != xset; then ++ case $am_aux_dir in ++ *\ * | *\ *) ++ MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; ++ *) ++ MISSING="\${SHELL} $am_aux_dir/missing" ;; ++ esac ++fi ++# Use eval to expand $SHELL ++if eval "$MISSING --run true"; then ++ am_missing_run="$MISSING --run " ++else ++ am_missing_run= ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5 ++$as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;} ++fi ++ ++if test x"${install_sh}" != xset; then ++ case $am_aux_dir in ++ *\ * | *\ *) ++ install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; ++ *) ++ install_sh="\${SHELL} $am_aux_dir/install-sh" ++ esac ++fi ++ ++# Installed binaries are usually stripped using `strip' when the user ++# run `make install-strip'. However `strip' might not be the right ++# tool to use in cross-compilation environments, therefore Automake ++# will honor the `STRIP' environment variable to overrule this program. ++if test "$cross_compiling" != no; then ++ if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. ++set dummy ${ac_tool_prefix}strip; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_STRIP+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$STRIP"; then ++ ac_cv_prog_STRIP="$STRIP" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_STRIP="${ac_tool_prefix}strip" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++STRIP=$ac_cv_prog_STRIP ++if test -n "$STRIP"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 ++$as_echo "$STRIP" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_STRIP"; then ++ ac_ct_STRIP=$STRIP ++ # Extract the first word of "strip", so it can be a program name with args. ++set dummy strip; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_STRIP"; then ++ ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_STRIP="strip" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP ++if test -n "$ac_ct_STRIP"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 ++$as_echo "$ac_ct_STRIP" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_STRIP" = x; then ++ STRIP=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ STRIP=$ac_ct_STRIP ++ fi ++else ++ STRIP="$ac_cv_prog_STRIP" ++fi ++ ++fi ++INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 ++$as_echo_n "checking for a thread-safe mkdir -p... " >&6; } ++if test -z "$MKDIR_P"; then ++ if test "${ac_cv_path_mkdir+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_prog in mkdir gmkdir; do ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue ++ case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( ++ 'mkdir (GNU coreutils) '* | \ ++ 'mkdir (coreutils) '* | \ ++ 'mkdir (fileutils) '4.1*) ++ ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext ++ break 3;; ++ esac ++ done ++ done ++ done ++IFS=$as_save_IFS ++ ++fi ++ ++ if test "${ac_cv_path_mkdir+set}" = set; then ++ MKDIR_P="$ac_cv_path_mkdir -p" ++ else ++ # As a last resort, use the slow shell script. Don't cache a ++ # value for MKDIR_P within a source directory, because that will ++ # break other packages using the cache if that directory is ++ # removed, or if the value is a relative name. ++ test -d ./--version && rmdir ./--version ++ MKDIR_P="$ac_install_sh -d" ++ fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 ++$as_echo "$MKDIR_P" >&6; } ++ ++mkdir_p="$MKDIR_P" ++case $mkdir_p in ++ [\\/$]* | ?:[\\/]*) ;; ++ */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; ++esac ++ ++for ac_prog in gawk mawk nawk awk ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. ++set dummy $ac_prog; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_AWK+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$AWK"; then ++ ac_cv_prog_AWK="$AWK" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_AWK="$ac_prog" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++AWK=$ac_cv_prog_AWK ++if test -n "$AWK"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 ++$as_echo "$AWK" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ test -n "$AWK" && break ++done ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 ++$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } ++set x ${MAKE-make} ++ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` ++if { as_var=ac_cv_prog_make_${ac_make}_set; eval "test \"\${$as_var+set}\" = set"; }; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat >conftest.make <<\_ACEOF ++SHELL = /bin/sh ++all: ++ @echo '@@@%%%=$(MAKE)=@@@%%%' ++_ACEOF ++# GNU make sometimes prints "make[1]: Entering...", which would confuse us. ++case `${MAKE-make} -f conftest.make 2>/dev/null` in ++ *@@@%%%=?*=@@@%%%*) ++ eval ac_cv_prog_make_${ac_make}_set=yes;; ++ *) ++ eval ac_cv_prog_make_${ac_make}_set=no;; ++esac ++rm -f conftest.make ++fi ++if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++ SET_MAKE= ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++ SET_MAKE="MAKE=${MAKE-make}" ++fi ++ ++rm -rf .tst 2>/dev/null ++mkdir .tst 2>/dev/null ++if test -d .tst; then ++ am__leading_dot=. ++else ++ am__leading_dot=_ ++fi ++rmdir .tst 2>/dev/null ++ ++if test "`cd $srcdir && pwd`" != "`pwd`"; then ++ # Use -I$(srcdir) only when $(srcdir) != ., so that make's output ++ # is not polluted with repeated "-I." 
++ am__isrc=' -I$(srcdir)' ++ # test to see if srcdir already configured ++ if test -f $srcdir/config.status; then ++ as_fn_error "source directory already configured; run \"make distclean\" there first" "$LINENO" 5 ++ fi ++fi ++ ++# test whether we have cygpath ++if test -z "$CYGPATH_W"; then ++ if (cygpath --version) >/dev/null 2>/dev/null; then ++ CYGPATH_W='cygpath -w' ++ else ++ CYGPATH_W=echo ++ fi ++fi ++ ++ ++# Define the identity of the package. ++ PACKAGE='libbacktrace' ++ VERSION='version-unused' ++ ++ ++# Some tools Automake needs. ++ ++ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} ++ ++ ++AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} ++ ++ ++AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} ++ ++ ++AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} ++ ++ ++MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} ++ ++# We need awk for the "check" target. The system "awk" is bad on ++# some platforms. ++# Always define AMTAR for backward compatibility. Yes, it's still used ++# in the wild :-( We should find a proper way to deprecate it ... ++AMTAR='$${TAR-tar}' ++ ++am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -' ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to enable maintainer-specific portions of Makefiles" >&5 ++$as_echo_n "checking whether to enable maintainer-specific portions of Makefiles... " >&6; } ++ # Check whether --enable-maintainer-mode was given. 
++if test "${enable_maintainer_mode+set}" = set; then : ++ enableval=$enable_maintainer_mode; USE_MAINTAINER_MODE=$enableval ++else ++ USE_MAINTAINER_MODE=no ++fi ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_MAINTAINER_MODE" >&5 ++$as_echo "$USE_MAINTAINER_MODE" >&6; } ++ if test $USE_MAINTAINER_MODE = yes; then ++ MAINTAINER_MODE_TRUE= ++ MAINTAINER_MODE_FALSE='#' ++else ++ MAINTAINER_MODE_TRUE='#' ++ MAINTAINER_MODE_FALSE= ++fi ++ ++ MAINT=$MAINTAINER_MODE_TRUE ++ ++ ++ ++ ++# Check whether --with-target-subdir was given. ++if test "${with_target_subdir+set}" = set; then : ++ withval=$with_target_subdir; ++fi ++ ++ ++# We must force CC to /not/ be precious variables; otherwise ++# the wrong, non-multilib-adjusted value will be used in multilibs. ++# As a side effect, we have to subst CFLAGS ourselves. ++ ++ ++ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. ++set dummy ${ac_tool_prefix}gcc; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$CC"; then ++ ac_cv_prog_CC="$CC" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_CC="${ac_tool_prefix}gcc" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++CC=$ac_cv_prog_CC ++if test -n "$CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 ++$as_echo "$CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_CC"; then ++ ac_ct_CC=$CC ++ # Extract the first word of "gcc", so it can be a program name with args. ++set dummy gcc; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_CC"; then ++ ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_CC="gcc" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_CC=$ac_cv_prog_ac_ct_CC ++if test -n "$ac_ct_CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 ++$as_echo "$ac_ct_CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_CC" = x; then ++ CC="" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ CC=$ac_ct_CC ++ fi ++else ++ CC="$ac_cv_prog_CC" ++fi ++ ++if test -z "$CC"; then ++ if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. ++set dummy ${ac_tool_prefix}cc; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$CC"; then ++ ac_cv_prog_CC="$CC" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_CC="${ac_tool_prefix}cc" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++CC=$ac_cv_prog_CC ++if test -n "$CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 ++$as_echo "$CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ fi ++fi ++if test -z "$CC"; then ++ # Extract the first word of "cc", so it can be a program name with args. ++set dummy cc; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$CC"; then ++ ac_cv_prog_CC="$CC" # Let the user override the test. ++else ++ ac_prog_rejected=no ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ++ ac_prog_rejected=yes ++ continue ++ fi ++ ac_cv_prog_CC="cc" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++if test $ac_prog_rejected = yes; then ++ # We found a bogon in the path, so make sure we never use it. ++ set dummy $ac_cv_prog_CC ++ shift ++ if test $# != 0; then ++ # We chose a different compiler from the bogus one. ++ # However, it has the same basename, so the bogon will be chosen ++ # first if we set CC to just the basename; use the full file name. 
++ shift ++ ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" ++ fi ++fi ++fi ++fi ++CC=$ac_cv_prog_CC ++if test -n "$CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 ++$as_echo "$CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$CC"; then ++ if test -n "$ac_tool_prefix"; then ++ for ac_prog in cl.exe ++ do ++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. ++set dummy $ac_tool_prefix$ac_prog; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$CC"; then ++ ac_cv_prog_CC="$CC" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_CC="$ac_tool_prefix$ac_prog" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++CC=$ac_cv_prog_CC ++if test -n "$CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 ++$as_echo "$CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ test -n "$CC" && break ++ done ++fi ++if test -z "$CC"; then ++ ac_ct_CC=$CC ++ for ac_prog in cl.exe ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. ++set dummy $ac_prog; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... 
" >&6; } ++if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_CC"; then ++ ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_CC="$ac_prog" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_CC=$ac_cv_prog_ac_ct_CC ++if test -n "$ac_ct_CC"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 ++$as_echo "$ac_ct_CC" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ test -n "$ac_ct_CC" && break ++done ++ ++ if test "x$ac_ct_CC" = x; then ++ CC="" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ CC=$ac_ct_CC ++ fi ++fi ++ ++fi ++ ++ ++test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++as_fn_error "no acceptable C compiler found in \$PATH ++See \`config.log' for more details." "$LINENO" 5; } ++ ++# Provide some information about the compiler. 
++$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 ++set X $ac_compile ++ac_compiler=$2 ++for ac_option in --version -v -V -qversion; do ++ { { ac_try="$ac_compiler $ac_option >&5" ++case "(($ac_try" in ++ *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; ++ *) ac_try_echo=$ac_try;; ++esac ++eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" ++$as_echo "$ac_try_echo"; } >&5 ++ (eval "$ac_compiler $ac_option >&5") 2>conftest.err ++ ac_status=$? ++ if test -s conftest.err; then ++ sed '10a\ ++... rest of stderr output deleted ... ++ 10q' conftest.err >conftest.er1 ++ cat conftest.er1 >&5 ++ rm -f conftest.er1 conftest.err ++ fi ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++done ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 ++$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } ++if test "${ac_cv_c_compiler_gnu+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++#ifndef __GNUC__ ++ choke me ++#endif ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_compiler_gnu=yes ++else ++ ac_compiler_gnu=no ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ac_cv_c_compiler_gnu=$ac_compiler_gnu ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 ++$as_echo "$ac_cv_c_compiler_gnu" >&6; } ++if test $ac_compiler_gnu = yes; then ++ GCC=yes ++else ++ GCC= ++fi ++ac_test_CFLAGS=${CFLAGS+set} ++ac_save_CFLAGS=$CFLAGS ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 ++$as_echo_n "checking whether $CC accepts -g... 
" >&6; } ++if test "${ac_cv_prog_cc_g+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_save_c_werror_flag=$ac_c_werror_flag ++ ac_c_werror_flag=yes ++ ac_cv_prog_cc_g=no ++ CFLAGS="-g" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_cv_prog_cc_g=yes ++else ++ CFLAGS="" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ++else ++ ac_c_werror_flag=$ac_save_c_werror_flag ++ CFLAGS="-g" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ ac_cv_prog_cc_g=yes ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ ac_c_werror_flag=$ac_save_c_werror_flag ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 ++$as_echo "$ac_cv_prog_cc_g" >&6; } ++if test "$ac_test_CFLAGS" = set; then ++ CFLAGS=$ac_save_CFLAGS ++elif test $ac_cv_prog_cc_g = yes; then ++ if test "$GCC" = yes; then ++ CFLAGS="-g -O2" ++ else ++ CFLAGS="-g" ++ fi ++else ++ if test "$GCC" = yes; then ++ CFLAGS="-O2" ++ else ++ CFLAGS= ++ fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 ++$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } ++if test "${ac_cv_prog_cc_c89+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_cv_prog_cc_c89=no ++ac_save_CC=$CC ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++#include ++#include ++#include ++#include ++/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. 
*/ ++struct buf { int x; }; ++FILE * (*rcsopen) (struct buf *, struct stat *, int); ++static char *e (p, i) ++ char **p; ++ int i; ++{ ++ return p[i]; ++} ++static char *f (char * (*g) (char **, int), char **p, ...) ++{ ++ char *s; ++ va_list v; ++ va_start (v,p); ++ s = g (p, va_arg (v,int)); ++ va_end (v); ++ return s; ++} ++ ++/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has ++ function prototypes and stuff, but not '\xHH' hex character constants. ++ These don't provoke an error unfortunately, instead are silently treated ++ as 'x'. The following induces an error, until -std is added to get ++ proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an ++ array size at least. It's necessary to write '\x00'==0 to get something ++ that's true only with -std. */ ++int osf4_cc_array ['\x00' == 0 ? 1 : -1]; ++ ++/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters ++ inside strings and character constants. */ ++#define FOO(x) 'x' ++int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; ++ ++int test (int i, double x); ++struct s1 {int (*f) (int a);}; ++struct s2 {int (*f) (double a);}; ++int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); ++int argc; ++char **argv; ++int ++main () ++{ ++return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ++ ; ++ return 0; ++} ++_ACEOF ++for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ ++ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" ++do ++ CC="$ac_save_CC $ac_arg" ++ if ac_fn_c_try_compile "$LINENO"; then : ++ ac_cv_prog_cc_c89=$ac_arg ++fi ++rm -f core conftest.err conftest.$ac_objext ++ test "x$ac_cv_prog_cc_c89" != "xno" && break ++done ++rm -f conftest.$ac_ext ++CC=$ac_save_CC ++ ++fi ++# AC_CACHE_VAL ++case "x$ac_cv_prog_cc_c89" in ++ x) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 ++$as_echo "none needed" >&6; } ;; ++ xno) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 ++$as_echo "unsupported" >&6; } ;; ++ *) ++ CC="$CC $ac_cv_prog_cc_c89" ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 ++$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; ++esac ++if test "x$ac_cv_prog_cc_c89" != xno; then : ++ ++fi ++ ++ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. ++set dummy ${ac_tool_prefix}ranlib; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_RANLIB+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$RANLIB"; then ++ ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. 
++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++RANLIB=$ac_cv_prog_RANLIB ++if test -n "$RANLIB"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 ++$as_echo "$RANLIB" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_RANLIB"; then ++ ac_ct_RANLIB=$RANLIB ++ # Extract the first word of "ranlib", so it can be a program name with args. ++set dummy ranlib; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_RANLIB+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_RANLIB"; then ++ ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_RANLIB="ranlib" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB ++if test -n "$ac_ct_RANLIB"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 ++$as_echo "$ac_ct_RANLIB" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_RANLIB" = x; then ++ RANLIB=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ RANLIB=$ac_ct_RANLIB ++ fi ++else ++ RANLIB="$ac_cv_prog_RANLIB" ++fi ++ ++ ++for ac_prog in gawk mawk nawk awk ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. ++set dummy $ac_prog; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_AWK+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$AWK"; then ++ ac_cv_prog_AWK="$AWK" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_AWK="$ac_prog" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++AWK=$ac_cv_prog_AWK ++if test -n "$AWK"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 ++$as_echo "$AWK" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ test -n "$AWK" && break ++done ++ ++case "$AWK" in ++"") as_fn_error "can't build without awk" "$LINENO" 5 ;; ++esac ++ ++case `pwd` in ++ *\ * | *\ *) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 ++$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;; ++esac ++ ++ ++ ++macro_version='2.2.7a' ++macro_revision='1.3134' ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ltmain="$ac_aux_dir/ltmain.sh" ++ ++# Backslashify metacharacters that are still active within ++# double-quoted strings. ++sed_quote_subst='s/\(["`$\\]\)/\\\1/g' ++ ++# Same as above, but do not quote variable references. ++double_quote_subst='s/\(["`\\]\)/\\\1/g' ++ ++# Sed substitution to delay expansion of an escaped shell variable in a ++# double_quote_subst'ed string. ++delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' ++ ++# Sed substitution to delay expansion of an escaped single quote. 
++delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' ++ ++# Sed substitution to avoid accidental globbing in evaled expressions ++no_glob_subst='s/\*/\\\*/g' ++ ++ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ++ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO ++ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 ++$as_echo_n "checking how to print strings... " >&6; } ++# Test print first, because it will be a builtin if present. ++if test "X`print -r -- -n 2>/dev/null`" = X-n && \ ++ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then ++ ECHO='print -r --' ++elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then ++ ECHO='printf %s\n' ++else ++ # Use this function as a fallback that always works. ++ func_fallback_echo () ++ { ++ eval 'cat <<_LTECHO_EOF ++$1 ++_LTECHO_EOF' ++ } ++ ECHO='func_fallback_echo' ++fi ++ ++# func_echo_all arg... ++# Invoke $ECHO with all args, space-separated. ++func_echo_all () ++{ ++ $ECHO "" ++} ++ ++case "$ECHO" in ++ printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5 ++$as_echo "printf" >&6; } ;; ++ print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5 ++$as_echo "print -r" >&6; } ;; ++ *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5 ++$as_echo "cat" >&6; } ;; ++esac ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 ++$as_echo_n "checking for a sed that does not truncate output... 
" >&6; } ++if test "${ac_cv_path_SED+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ ++ for ac_i in 1 2 3 4 5 6 7; do ++ ac_script="$ac_script$as_nl$ac_script" ++ done ++ echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed ++ { ac_script=; unset ac_script;} ++ if test -z "$SED"; then ++ ac_path_SED_found=false ++ # Loop through the user's path and test for each of PROGNAME-LIST ++ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_prog in sed gsed; do ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" ++ { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue ++# Check for GNU ac_path_SED and select it if it is found. ++ # Check for GNU $ac_path_SED ++case `"$ac_path_SED" --version 2>&1` in ++*GNU*) ++ ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; ++*) ++ ac_count=0 ++ $as_echo_n 0123456789 >"conftest.in" ++ while : ++ do ++ cat "conftest.in" "conftest.in" >"conftest.tmp" ++ mv "conftest.tmp" "conftest.in" ++ cp "conftest.in" "conftest.nl" ++ $as_echo '' >> "conftest.nl" ++ "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break ++ diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break ++ as_fn_arith $ac_count + 1 && ac_count=$as_val ++ if test $ac_count -gt ${ac_path_SED_max-0}; then ++ # Best one so far, save it but keep looking for a better one ++ ac_cv_path_SED="$ac_path_SED" ++ ac_path_SED_max=$ac_count ++ fi ++ # 10*(2^10) chars as input seems more than enough ++ test $ac_count -gt 10 && break ++ done ++ rm -f conftest.in conftest.tmp conftest.nl conftest.out;; ++esac ++ ++ $ac_path_SED_found && break 3 ++ done ++ done ++ done ++IFS=$as_save_IFS ++ if test -z "$ac_cv_path_SED"; then ++ as_fn_error "no acceptable sed could be found in \$PATH" "$LINENO" 5 ++ fi ++else ++ ac_cv_path_SED=$SED 
++fi ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 ++$as_echo "$ac_cv_path_SED" >&6; } ++ SED="$ac_cv_path_SED" ++ rm -f conftest.sed ++ ++test -z "$SED" && SED=sed ++Xsed="$SED -e 1s/^X//" ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5 ++$as_echo_n "checking for fgrep... " >&6; } ++if test "${ac_cv_path_FGREP+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1 ++ then ac_cv_path_FGREP="$GREP -F" ++ else ++ if test -z "$FGREP"; then ++ ac_path_FGREP_found=false ++ # Loop through the user's path and test for each of PROGNAME-LIST ++ as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_prog in fgrep; do ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" ++ { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue ++# Check for GNU ac_path_FGREP and select it if it is found. 
++ # Check for GNU $ac_path_FGREP ++case `"$ac_path_FGREP" --version 2>&1` in ++*GNU*) ++ ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;; ++*) ++ ac_count=0 ++ $as_echo_n 0123456789 >"conftest.in" ++ while : ++ do ++ cat "conftest.in" "conftest.in" >"conftest.tmp" ++ mv "conftest.tmp" "conftest.in" ++ cp "conftest.in" "conftest.nl" ++ $as_echo 'FGREP' >> "conftest.nl" ++ "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break ++ diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break ++ as_fn_arith $ac_count + 1 && ac_count=$as_val ++ if test $ac_count -gt ${ac_path_FGREP_max-0}; then ++ # Best one so far, save it but keep looking for a better one ++ ac_cv_path_FGREP="$ac_path_FGREP" ++ ac_path_FGREP_max=$ac_count ++ fi ++ # 10*(2^10) chars as input seems more than enough ++ test $ac_count -gt 10 && break ++ done ++ rm -f conftest.in conftest.tmp conftest.nl conftest.out;; ++esac ++ ++ $ac_path_FGREP_found && break 3 ++ done ++ done ++ done ++IFS=$as_save_IFS ++ if test -z "$ac_cv_path_FGREP"; then ++ as_fn_error "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 ++ fi ++else ++ ac_cv_path_FGREP=$FGREP ++fi ++ ++ fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5 ++$as_echo "$ac_cv_path_FGREP" >&6; } ++ FGREP="$ac_cv_path_FGREP" ++ ++ ++test -z "$GREP" && GREP=grep ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++# Check whether --with-gnu-ld was given. ++if test "${with_gnu_ld+set}" = set; then : ++ withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes ++else ++ with_gnu_ld=no ++fi ++ ++ac_prog=ld ++if test "$GCC" = yes; then ++ # Check if gcc -print-prog-name=ld gives a path. ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 ++$as_echo_n "checking for ld used by $CC... 
" >&6; } ++ case $host in ++ *-*-mingw*) ++ # gcc leaves a trailing carriage return which upsets mingw ++ ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; ++ *) ++ ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; ++ esac ++ case $ac_prog in ++ # Accept absolute paths. ++ [\\/]* | ?:[\\/]*) ++ re_direlt='/[^/][^/]*/\.\./' ++ # Canonicalize the pathname of ld ++ ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` ++ while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ++ ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` ++ done ++ test -z "$LD" && LD="$ac_prog" ++ ;; ++ "") ++ # If it fails, then pretend we aren't using GCC. ++ ac_prog=ld ++ ;; ++ *) ++ # If it is relative, then search for the first ld in PATH. ++ with_gnu_ld=unknown ++ ;; ++ esac ++elif test "$with_gnu_ld" = yes; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 ++$as_echo_n "checking for GNU ld... " >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 ++$as_echo_n "checking for non-GNU ld... " >&6; } ++fi ++if test "${lt_cv_path_LD+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -z "$LD"; then ++ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR ++ for ac_dir in $PATH; do ++ IFS="$lt_save_ifs" ++ test -z "$ac_dir" && ac_dir=. ++ if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then ++ lt_cv_path_LD="$ac_dir/$ac_prog" ++ # Check to see if the program is GNU ld. I'd rather use --version, ++ # but apparently some variants of GNU ld only accept -v. ++ # Break only if it was the GNU/non-GNU ld that we prefer. ++ case `"$lt_cv_path_LD" -v 2>&1 &5 ++$as_echo "$LD" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++test -z "$LD" && as_fn_error "no acceptable ld found in \$PATH" "$LINENO" 5 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 ++$as_echo_n "checking if the linker ($LD) is GNU ld... 
" >&6; } ++if test "${lt_cv_prog_gnu_ld+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ # I'd rather use --version here, but apparently some GNU lds only accept -v. ++case `$LD -v 2>&1 &5 ++$as_echo "$lt_cv_prog_gnu_ld" >&6; } ++with_gnu_ld=$lt_cv_prog_gnu_ld ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5 ++$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; } ++if test "${lt_cv_path_NM+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$NM"; then ++ # Let the user override the test. ++ lt_cv_path_NM="$NM" ++else ++ lt_nm_to_check="${ac_tool_prefix}nm" ++ if test -n "$ac_tool_prefix" && test "$build" = "$host"; then ++ lt_nm_to_check="$lt_nm_to_check nm" ++ fi ++ for lt_tmp_nm in $lt_nm_to_check; do ++ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR ++ for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do ++ IFS="$lt_save_ifs" ++ test -z "$ac_dir" && ac_dir=. ++ tmp_nm="$ac_dir/$lt_tmp_nm" ++ if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then ++ # Check to see if the nm accepts a BSD-compat flag. 
++ # Adding the `sed 1q' prevents false positives on HP-UX, which says: ++ # nm: unknown option "B" ignored ++ # Tru64's nm complains that /dev/null is an invalid object file ++ case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in ++ */dev/null* | *'Invalid file or object type'*) ++ lt_cv_path_NM="$tmp_nm -B" ++ break ++ ;; ++ *) ++ case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in ++ */dev/null*) ++ lt_cv_path_NM="$tmp_nm -p" ++ break ++ ;; ++ *) ++ lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but ++ continue # so that we can try to find one that supports BSD flags ++ ;; ++ esac ++ ;; ++ esac ++ fi ++ done ++ IFS="$lt_save_ifs" ++ done ++ : ${lt_cv_path_NM=no} ++fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 ++$as_echo "$lt_cv_path_NM" >&6; } ++if test "$lt_cv_path_NM" != "no"; then ++ NM="$lt_cv_path_NM" ++else ++ # Didn't find any BSD compatible name lister, look for dumpbin. ++ if test -n "$DUMPBIN"; then : ++ # Let the user override the test. ++ else ++ if test -n "$ac_tool_prefix"; then ++ for ac_prog in dumpbin "link -dump" ++ do ++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. ++set dummy $ac_tool_prefix$ac_prog; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_DUMPBIN+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$DUMPBIN"; then ++ ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++DUMPBIN=$ac_cv_prog_DUMPBIN ++if test -n "$DUMPBIN"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5 ++$as_echo "$DUMPBIN" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ test -n "$DUMPBIN" && break ++ done ++fi ++if test -z "$DUMPBIN"; then ++ ac_ct_DUMPBIN=$DUMPBIN ++ for ac_prog in dumpbin "link -dump" ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. ++set dummy $ac_prog; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_DUMPBIN+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_DUMPBIN"; then ++ ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_DUMPBIN="$ac_prog" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN ++if test -n "$ac_ct_DUMPBIN"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5 ++$as_echo "$ac_ct_DUMPBIN" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ test -n "$ac_ct_DUMPBIN" && break ++done ++ ++ if test "x$ac_ct_DUMPBIN" = x; then ++ DUMPBIN=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ DUMPBIN=$ac_ct_DUMPBIN ++ fi ++fi ++ ++ case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in ++ *COFF*) ++ DUMPBIN="$DUMPBIN -symbols" ++ ;; ++ *) ++ DUMPBIN=: ++ ;; ++ esac ++ fi ++ ++ if test "$DUMPBIN" != ":"; then ++ NM="$DUMPBIN" ++ fi ++fi ++test -z "$NM" && NM=nm ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5 ++$as_echo_n "checking the name lister ($NM) interface... 
" >&6; } ++if test "${lt_cv_nm_interface+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_nm_interface="BSD nm" ++ echo "int some_variable = 0;" > conftest.$ac_ext ++ (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5) ++ (eval "$ac_compile" 2>conftest.err) ++ cat conftest.err >&5 ++ (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5) ++ (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) ++ cat conftest.err >&5 ++ (eval echo "\"\$as_me:$LINENO: output\"" >&5) ++ cat conftest.out >&5 ++ if $GREP 'External.*some_variable' conftest.out > /dev/null; then ++ lt_cv_nm_interface="MS dumpbin" ++ fi ++ rm -f conftest* ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5 ++$as_echo "$lt_cv_nm_interface" >&6; } ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 ++$as_echo_n "checking whether ln -s works... " >&6; } ++LN_S=$as_ln_s ++if test "$LN_S" = "ln -s"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 ++$as_echo "no, using $LN_S" >&6; } ++fi ++ ++# find the maximum length of command line arguments ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5 ++$as_echo_n "checking the maximum length of command line arguments... " >&6; } ++if test "${lt_cv_sys_max_cmd_len+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ i=0 ++ teststring="ABCD" ++ ++ case $build_os in ++ msdosdjgpp*) ++ # On DJGPP, this test can blow up pretty badly due to problems in libc ++ # (any single argument exceeding 2000 bytes causes a buffer overrun ++ # during glob expansion). Even if it were fixed, the result of this ++ # check would be larger than it should be. 
++ lt_cv_sys_max_cmd_len=12288; # 12K is about right ++ ;; ++ ++ gnu*) ++ # Under GNU Hurd, this test is not required because there is ++ # no limit to the length of command line arguments. ++ # Libtool will interpret -1 as no limit whatsoever ++ lt_cv_sys_max_cmd_len=-1; ++ ;; ++ ++ cygwin* | mingw* | cegcc*) ++ # On Win9x/ME, this test blows up -- it succeeds, but takes ++ # about 5 minutes as the teststring grows exponentially. ++ # Worse, since 9x/ME are not pre-emptively multitasking, ++ # you end up with a "frozen" computer, even though with patience ++ # the test eventually succeeds (with a max line length of 256k). ++ # Instead, let's just punt: use the minimum linelength reported by ++ # all of the supported platforms: 8192 (on NT/2K/XP). ++ lt_cv_sys_max_cmd_len=8192; ++ ;; ++ ++ mint*) ++ # On MiNT this can take a long time and run out of memory. ++ lt_cv_sys_max_cmd_len=8192; ++ ;; ++ ++ amigaos*) ++ # On AmigaOS with pdksh, this test takes hours, literally. ++ # So we just punt and use a minimum line length of 8192. ++ lt_cv_sys_max_cmd_len=8192; ++ ;; ++ ++ netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) ++ # This has been around since 386BSD, at least. Likely further. ++ if test -x /sbin/sysctl; then ++ lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` ++ elif test -x /usr/sbin/sysctl; then ++ lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` ++ else ++ lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs ++ fi ++ # And add a safety zone ++ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` ++ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` ++ ;; ++ ++ interix*) ++ # We know the value 262144 and hardcode it with a safety zone (like BSD) ++ lt_cv_sys_max_cmd_len=196608 ++ ;; ++ ++ osf*) ++ # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure ++ # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not ++ # nice to cause kernel panics so lets avoid the loop below. 
++ # First set a reasonable default. ++ lt_cv_sys_max_cmd_len=16384 ++ # ++ if test -x /sbin/sysconfig; then ++ case `/sbin/sysconfig -q proc exec_disable_arg_limit` in ++ *1*) lt_cv_sys_max_cmd_len=-1 ;; ++ esac ++ fi ++ ;; ++ sco3.2v5*) ++ lt_cv_sys_max_cmd_len=102400 ++ ;; ++ sysv5* | sco5v6* | sysv4.2uw2*) ++ kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` ++ if test -n "$kargmax"; then ++ lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[ ]//'` ++ else ++ lt_cv_sys_max_cmd_len=32768 ++ fi ++ ;; ++ *) ++ lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` ++ if test -n "$lt_cv_sys_max_cmd_len"; then ++ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` ++ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` ++ else ++ # Make teststring a little bigger before we do anything with it. ++ # a 1K string should be a reasonable start. ++ for i in 1 2 3 4 5 6 7 8 ; do ++ teststring=$teststring$teststring ++ done ++ SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} ++ # If test is not a shell built-in, we'll probably end up computing a ++ # maximum length that is only half of the actual maximum length, but ++ # we can't tell. ++ while { test "X"`func_fallback_echo "$teststring$teststring" 2>/dev/null` \ ++ = "X$teststring$teststring"; } >/dev/null 2>&1 && ++ test $i != 17 # 1/2 MB should be enough ++ do ++ i=`expr $i + 1` ++ teststring=$teststring$teststring ++ done ++ # Only check the string length outside the loop. ++ lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` ++ teststring= ++ # Add a significant safety factor because C++ compilers can tack on ++ # massive amounts of additional arguments before passing them to the ++ # linker. It appears as though 1/2 is a usable value. 
++ lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` ++ fi ++ ;; ++ esac ++ ++fi ++ ++if test -n $lt_cv_sys_max_cmd_len ; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 ++$as_echo "$lt_cv_sys_max_cmd_len" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5 ++$as_echo "none" >&6; } ++fi ++max_cmd_len=$lt_cv_sys_max_cmd_len ++ ++ ++ ++ ++ ++ ++: ${CP="cp -f"} ++: ${MV="mv -f"} ++: ${RM="rm -f"} ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5 ++$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; } ++# Try some XSI features ++xsi_shell=no ++( _lt_dummy="a/b/c" ++ test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ ++ = c,a/b,, \ ++ && eval 'test $(( 1 + 1 )) -eq 2 \ ++ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ ++ && xsi_shell=yes ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5 ++$as_echo "$xsi_shell" >&6; } ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5 ++$as_echo_n "checking whether the shell understands \"+=\"... 
" >&6; } ++lt_shell_append=no ++( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \ ++ >/dev/null 2>&1 \ ++ && lt_shell_append=yes ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5 ++$as_echo "$lt_shell_append" >&6; } ++ ++ ++if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then ++ lt_unset=unset ++else ++ lt_unset=false ++fi ++ ++ ++ ++ ++ ++# test EBCDIC or ASCII ++case `echo X|tr X '\101'` in ++ A) # ASCII based system ++ # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr ++ lt_SP2NL='tr \040 \012' ++ lt_NL2SP='tr \015\012 \040\040' ++ ;; ++ *) # EBCDIC based system ++ lt_SP2NL='tr \100 \n' ++ lt_NL2SP='tr \r\n \100\100' ++ ;; ++esac ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 ++$as_echo_n "checking for $LD option to reload object files... " >&6; } ++if test "${lt_cv_ld_reload_flag+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_ld_reload_flag='-r' ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5 ++$as_echo "$lt_cv_ld_reload_flag" >&6; } ++reload_flag=$lt_cv_ld_reload_flag ++case $reload_flag in ++"" | " "*) ;; ++*) reload_flag=" $reload_flag" ;; ++esac ++reload_cmds='$LD$reload_flag -o $output$reload_objs' ++case $host_os in ++ darwin*) ++ if test "$GCC" = yes; then ++ reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' ++ else ++ reload_cmds='$LD$reload_flag -o $output$reload_objs' ++ fi ++ ;; ++esac ++ ++ ++ ++ ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args. ++set dummy ${ac_tool_prefix}objdump; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... 
" >&6; } ++if test "${ac_cv_prog_OBJDUMP+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$OBJDUMP"; then ++ ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++OBJDUMP=$ac_cv_prog_OBJDUMP ++if test -n "$OBJDUMP"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5 ++$as_echo "$OBJDUMP" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_OBJDUMP"; then ++ ac_ct_OBJDUMP=$OBJDUMP ++ # Extract the first word of "objdump", so it can be a program name with args. ++set dummy objdump; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_OBJDUMP+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_OBJDUMP"; then ++ ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_OBJDUMP="objdump" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP ++if test -n "$ac_ct_OBJDUMP"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5 ++$as_echo "$ac_ct_OBJDUMP" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_OBJDUMP" = x; then ++ OBJDUMP="false" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ OBJDUMP=$ac_ct_OBJDUMP ++ fi ++else ++ OBJDUMP="$ac_cv_prog_OBJDUMP" ++fi ++ ++test -z "$OBJDUMP" && OBJDUMP=objdump ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5 ++$as_echo_n "checking how to recognize dependent libraries... " >&6; } ++if test "${lt_cv_deplibs_check_method+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_file_magic_cmd='$MAGIC_CMD' ++lt_cv_file_magic_test_file= ++lt_cv_deplibs_check_method='unknown' ++# Need to set the preceding variable on all platforms that support ++# interlibrary dependencies. ++# 'none' -- dependencies not supported. ++# `unknown' -- same as none, but documents that we really don't know. ++# 'pass_all' -- all dependencies passed with no checks. ++# 'test_compile' -- check by making test program. ++# 'file_magic [[regex]]' -- check by looking for files in library path ++# which responds to the $file_magic_cmd with a given extended regex. 
++# If you have `file' or equivalent on your system and you're not sure ++# whether `pass_all' will *always* work, you probably want this one. ++ ++case $host_os in ++aix[4-9]*) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++beos*) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++bsdi[45]*) ++ lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)' ++ lt_cv_file_magic_cmd='/usr/bin/file -L' ++ lt_cv_file_magic_test_file=/shlib/libc.so ++ ;; ++ ++cygwin*) ++ # func_win32_libid is a shell function defined in ltmain.sh ++ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' ++ lt_cv_file_magic_cmd='func_win32_libid' ++ ;; ++ ++mingw* | pw32*) ++ # Base MSYS/MinGW do not provide the 'file' command needed by ++ # func_win32_libid shell function, so use a weaker test based on 'objdump', ++ # unless we find 'file', for example because we are cross-compiling. ++ # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin. ++ if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then ++ lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' ++ lt_cv_file_magic_cmd='func_win32_libid' ++ else ++ lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' ++ lt_cv_file_magic_cmd='$OBJDUMP -f' ++ fi ++ ;; ++ ++cegcc*) ++ # use the weaker test based on 'objdump'. See mingw*. ++ lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' ++ lt_cv_file_magic_cmd='$OBJDUMP -f' ++ ;; ++ ++darwin* | rhapsody*) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++freebsd* | dragonfly*) ++ if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then ++ case $host_cpu in ++ i*86 ) ++ # Not sure whether the presence of OpenBSD here was a mistake. ++ # Let's accept both of them until this is cleared up. 
++ lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library' ++ lt_cv_file_magic_cmd=/usr/bin/file ++ lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` ++ ;; ++ esac ++ else ++ lt_cv_deplibs_check_method=pass_all ++ fi ++ ;; ++ ++gnu*) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++haiku*) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++hpux10.20* | hpux11*) ++ lt_cv_file_magic_cmd=/usr/bin/file ++ case $host_cpu in ++ ia64*) ++ lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64' ++ lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so ++ ;; ++ hppa*64*) ++ lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]' ++ lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl ++ ;; ++ *) ++ lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library' ++ lt_cv_file_magic_test_file=/usr/lib/libc.sl ++ ;; ++ esac ++ ;; ++ ++interix[3-9]*) ++ # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here ++ lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$' ++ ;; ++ ++irix5* | irix6* | nonstopux*) ++ case $LD in ++ *-32|*"-32 ") libmagic=32-bit;; ++ *-n32|*"-n32 ") libmagic=N32;; ++ *-64|*"-64 ") libmagic=64-bit;; ++ *) libmagic=never-match;; ++ esac ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++# This must be Linux ELF. 
++linux* | k*bsd*-gnu | kopensolaris*-gnu) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++netbsd*) ++ if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then ++ lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' ++ else ++ lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$' ++ fi ++ ;; ++ ++newos6*) ++ lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)' ++ lt_cv_file_magic_cmd=/usr/bin/file ++ lt_cv_file_magic_test_file=/usr/lib/libnls.so ++ ;; ++ ++*nto* | *qnx*) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++openbsd*) ++ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then ++ lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$' ++ else ++ lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' ++ fi ++ ;; ++ ++osf3* | osf4* | osf5*) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++rdos*) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++solaris*) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ ++sysv4 | sysv4.3*) ++ case $host_vendor in ++ motorola) ++ lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]' ++ lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` ++ ;; ++ ncr) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ sequent) ++ lt_cv_file_magic_cmd='/bin/file' ++ lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )' ++ ;; ++ sni) ++ lt_cv_file_magic_cmd='/bin/file' ++ lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib" ++ lt_cv_file_magic_test_file=/lib/libc.so ++ ;; ++ siemens) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ pc) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++ esac ++ ;; 
++ ++tpf*) ++ lt_cv_deplibs_check_method=pass_all ++ ;; ++esac ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 ++$as_echo "$lt_cv_deplibs_check_method" >&6; } ++file_magic_cmd=$lt_cv_file_magic_cmd ++deplibs_check_method=$lt_cv_deplibs_check_method ++test -z "$deplibs_check_method" && deplibs_check_method=unknown ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. ++set dummy ${ac_tool_prefix}ar; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_AR+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$AR"; then ++ ac_cv_prog_AR="$AR" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_AR="${ac_tool_prefix}ar" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++AR=$ac_cv_prog_AR ++if test -n "$AR"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5 ++$as_echo "$AR" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_AR"; then ++ ac_ct_AR=$AR ++ # Extract the first word of "ar", so it can be a program name with args. ++set dummy ar; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... 
" >&6; } ++if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_AR"; then ++ ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_AR="ar" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_AR=$ac_cv_prog_ac_ct_AR ++if test -n "$ac_ct_AR"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5 ++$as_echo "$ac_ct_AR" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_AR" = x; then ++ AR="false" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ AR=$ac_ct_AR ++ fi ++else ++ AR="$ac_cv_prog_AR" ++fi ++ ++test -z "$AR" && AR=ar ++test -z "$AR_FLAGS" && AR_FLAGS=cru ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. ++set dummy ${ac_tool_prefix}strip; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_STRIP+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$STRIP"; then ++ ac_cv_prog_STRIP="$STRIP" # Let the user override the test. 
++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_STRIP="${ac_tool_prefix}strip" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++STRIP=$ac_cv_prog_STRIP ++if test -n "$STRIP"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 ++$as_echo "$STRIP" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_STRIP"; then ++ ac_ct_STRIP=$STRIP ++ # Extract the first word of "strip", so it can be a program name with args. ++set dummy strip; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_STRIP"; then ++ ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_STRIP="strip" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP ++if test -n "$ac_ct_STRIP"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 ++$as_echo "$ac_ct_STRIP" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_STRIP" = x; then ++ STRIP=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ STRIP=$ac_ct_STRIP ++ fi ++else ++ STRIP="$ac_cv_prog_STRIP" ++fi ++ ++test -z "$STRIP" && STRIP=: ++ ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. ++set dummy ${ac_tool_prefix}ranlib; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_RANLIB+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$RANLIB"; then ++ ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++RANLIB=$ac_cv_prog_RANLIB ++if test -n "$RANLIB"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 ++$as_echo "$RANLIB" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_RANLIB"; then ++ ac_ct_RANLIB=$RANLIB ++ # Extract the first word of "ranlib", so it can be a program name with args. ++set dummy ranlib; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_RANLIB+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_RANLIB"; then ++ ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_RANLIB="ranlib" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB ++if test -n "$ac_ct_RANLIB"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 ++$as_echo "$ac_ct_RANLIB" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_RANLIB" = x; then ++ RANLIB=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ RANLIB=$ac_ct_RANLIB ++ fi ++else ++ RANLIB="$ac_cv_prog_RANLIB" ++fi ++ ++test -z "$RANLIB" && RANLIB=: ++ ++ ++ ++ ++ ++ ++# Determine commands to create old-style static archives. ++old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' ++old_postinstall_cmds='chmod 644 $oldlib' ++old_postuninstall_cmds= ++ ++if test -n "$RANLIB"; then ++ case $host_os in ++ openbsd*) ++ old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$oldlib" ++ ;; ++ *) ++ old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$oldlib" ++ ;; ++ esac ++ old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib" ++fi ++ ++case $host_os in ++ darwin*) ++ lock_old_archive_extraction=yes ;; ++ *) ++ lock_old_archive_extraction=no ;; ++esac ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++# If no C compiler was specified, use CC. ++LTCC=${LTCC-"$CC"} ++ ++# If no C compiler flags were specified, use CFLAGS. ++LTCFLAGS=${LTCFLAGS-"$CFLAGS"} ++ ++# Allow CC to be a program name with arguments. 
++compiler=$CC ++ ++ ++# Check for command to grab the raw symbol name followed by C symbol from nm. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5 ++$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; } ++if test "${lt_cv_sys_global_symbol_pipe+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ++# These are sane defaults that work on at least a few old systems. ++# [They come from Ultrix. What could be older than Ultrix?!! ;)] ++ ++# Character class describing NM global symbol codes. ++symcode='[BCDEGRST]' ++ ++# Regexp to match symbols that can be accessed directly from C. ++sympat='\([_A-Za-z][_A-Za-z0-9]*\)' ++ ++# Define system-specific variables. ++case $host_os in ++aix*) ++ symcode='[BCDT]' ++ ;; ++cygwin* | mingw* | pw32* | cegcc*) ++ symcode='[ABCDGISTW]' ++ ;; ++hpux*) ++ if test "$host_cpu" = ia64; then ++ symcode='[ABCDEGRST]' ++ fi ++ ;; ++irix* | nonstopux*) ++ symcode='[BCDEGRST]' ++ ;; ++osf*) ++ symcode='[BCDEGQRST]' ++ ;; ++solaris*) ++ symcode='[BDRT]' ++ ;; ++sco3.2v5*) ++ symcode='[DT]' ++ ;; ++sysv4.2uw2*) ++ symcode='[DT]' ++ ;; ++sysv5* | sco5v6* | unixware* | OpenUNIX*) ++ symcode='[ABDT]' ++ ;; ++sysv4) ++ symcode='[DFNSTU]' ++ ;; ++esac ++ ++# If we're using GNU nm, then use its standard symbol codes. ++case `$NM -V 2>&1` in ++*GNU* | *'with BFD'*) ++ symcode='[ABCDGIRSTW]' ;; ++esac ++ ++# Transform an extracted symbol line into a proper C declaration. ++# Some systems (esp. on ia64) link data and code symbols differently, ++# so use this general approach. 
++lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" ++ ++# Transform an extracted symbol line into symbol name and symbol address ++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" ++ ++# Handle CRLF in mingw tool chain ++opt_cr= ++case $build_os in ++mingw*) ++ opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp ++ ;; ++esac ++ ++# Try without a prefix underscore, then with it. ++for ac_symprfx in "" "_"; do ++ ++ # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. ++ symxfrm="\\1 $ac_symprfx\\2 \\2" ++ ++ # Write the raw and C identifiers. ++ if test "$lt_cv_nm_interface" = "MS dumpbin"; then ++ # Fake it for dumpbin and say T for any non-static function ++ # and D for any global variable. ++ # Also find C++ and __fastcall symbols from MSVC++, ++ # which start with @ or ?. ++ lt_cv_sys_global_symbol_pipe="$AWK '"\ ++" {last_section=section; section=\$ 3};"\ ++" /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ ++" \$ 0!~/External *\|/{next};"\ ++" / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ ++" {if(hide[section]) next};"\ ++" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\ ++" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ ++" s[1]~/^[@?]/{print s[1], s[1]; next};"\ ++" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ ++" ' prfx=^$ac_symprfx" ++ else ++ lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" ++ fi ++ ++ # Check to see that the pipe works correctly. 
++ pipe_works=no ++ ++ rm -f conftest* ++ cat > conftest.$ac_ext <<_LT_EOF ++#ifdef __cplusplus ++extern "C" { ++#endif ++char nm_test_var; ++void nm_test_func(void); ++void nm_test_func(void){} ++#ifdef __cplusplus ++} ++#endif ++int main(){nm_test_var='a';nm_test_func();return(0);} ++_LT_EOF ++ ++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 ++ (eval $ac_compile) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then ++ # Now try to grab the symbols. ++ nlist=conftest.nm ++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5 ++ (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } && test -s "$nlist"; then ++ # Try sorting and uniquifying the output. ++ if sort "$nlist" | uniq > "$nlist"T; then ++ mv -f "$nlist"T "$nlist" ++ else ++ rm -f "$nlist"T ++ fi ++ ++ # Make sure that we snagged all the symbols we need. ++ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then ++ if $GREP ' nm_test_func$' "$nlist" >/dev/null; then ++ cat <<_LT_EOF > conftest.$ac_ext ++#ifdef __cplusplus ++extern "C" { ++#endif ++ ++_LT_EOF ++ # Now generate the symbol file. ++ eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' ++ ++ cat <<_LT_EOF >> conftest.$ac_ext ++ ++/* The mapping between symbol names and symbols. 
*/ ++const struct { ++ const char *name; ++ void *address; ++} ++lt__PROGRAM__LTX_preloaded_symbols[] = ++{ ++ { "@PROGRAM@", (void *) 0 }, ++_LT_EOF ++ $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext ++ cat <<\_LT_EOF >> conftest.$ac_ext ++ {0, (void *) 0} ++}; ++ ++/* This works around a problem in FreeBSD linker */ ++#ifdef FREEBSD_WORKAROUND ++static const void *lt_preloaded_setup() { ++ return lt__PROGRAM__LTX_preloaded_symbols; ++} ++#endif ++ ++#ifdef __cplusplus ++} ++#endif ++_LT_EOF ++ # Now try linking the two files. ++ mv conftest.$ac_objext conftstm.$ac_objext ++ lt_save_LIBS="$LIBS" ++ lt_save_CFLAGS="$CFLAGS" ++ LIBS="conftstm.$ac_objext" ++ CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" ++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 ++ (eval $ac_link) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } && test -s conftest${ac_exeext}; then ++ pipe_works=yes ++ fi ++ LIBS="$lt_save_LIBS" ++ CFLAGS="$lt_save_CFLAGS" ++ else ++ echo "cannot find nm_test_func in $nlist" >&5 ++ fi ++ else ++ echo "cannot find nm_test_var in $nlist" >&5 ++ fi ++ else ++ echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5 ++ fi ++ else ++ echo "$progname: failed program was:" >&5 ++ cat conftest.$ac_ext >&5 ++ fi ++ rm -rf conftest* conftst* ++ ++ # Do not use the global_symbol_pipe unless it works. 
++ if test "$pipe_works" = yes; then ++ break ++ else ++ lt_cv_sys_global_symbol_pipe= ++ fi ++done ++ ++fi ++ ++if test -z "$lt_cv_sys_global_symbol_pipe"; then ++ lt_cv_sys_global_symbol_to_cdecl= ++fi ++if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 ++$as_echo "failed" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 ++$as_echo "ok" >&6; } ++fi ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++# Check whether --enable-libtool-lock was given. ++if test "${enable_libtool_lock+set}" = set; then : ++ enableval=$enable_libtool_lock; ++fi ++ ++test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes ++ ++# Some flags need to be propagated to the compiler or linker for good ++# libtool support. ++case $host in ++ia64-*-hpux*) ++ # Find out which ABI we are using. ++ echo 'int i;' > conftest.$ac_ext ++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 ++ (eval $ac_compile) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then ++ case `/usr/bin/file conftest.$ac_objext` in ++ *ELF-32*) ++ HPUX_IA64_MODE="32" ++ ;; ++ *ELF-64*) ++ HPUX_IA64_MODE="64" ++ ;; ++ esac ++ fi ++ rm -rf conftest* ++ ;; ++*-*-irix6*) ++ # Find out which ABI we are using. ++ echo '#line '$LINENO' "configure"' > conftest.$ac_ext ++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 ++ (eval $ac_compile) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 ++ test $ac_status = 0; }; then ++ if test "$lt_cv_prog_gnu_ld" = yes; then ++ case `/usr/bin/file conftest.$ac_objext` in ++ *32-bit*) ++ LD="${LD-ld} -melf32bsmip" ++ ;; ++ *N32*) ++ LD="${LD-ld} -melf32bmipn32" ++ ;; ++ *64-bit*) ++ LD="${LD-ld} -melf64bmip" ++ ;; ++ esac ++ else ++ case `/usr/bin/file conftest.$ac_objext` in ++ *32-bit*) ++ LD="${LD-ld} -32" ++ ;; ++ *N32*) ++ LD="${LD-ld} -n32" ++ ;; ++ *64-bit*) ++ LD="${LD-ld} -64" ++ ;; ++ esac ++ fi ++ fi ++ rm -rf conftest* ++ ;; ++ ++x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ ++s390*-*linux*|s390*-*tpf*|sparc*-*linux*) ++ # Find out which ABI we are using. ++ echo 'int i;' > conftest.$ac_ext ++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 ++ (eval $ac_compile) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; }; then ++ case `/usr/bin/file conftest.o` in ++ *32-bit*) ++ case $host in ++ x86_64-*kfreebsd*-gnu) ++ LD="${LD-ld} -m elf_i386_fbsd" ++ ;; ++ x86_64-*linux*) ++ case `/usr/bin/file conftest.o` in ++ *x86-64*) ++ LD="${LD-ld} -m elf32_x86_64" ++ ;; ++ *) ++ LD="${LD-ld} -m elf_i386" ++ ;; ++ esac ++ ;; ++ powerpc64le-*linux*) ++ LD="${LD-ld} -m elf32lppclinux" ++ ;; ++ powerpc64-*linux*) ++ LD="${LD-ld} -m elf32ppclinux" ++ ;; ++ s390x-*linux*) ++ LD="${LD-ld} -m elf_s390" ++ ;; ++ sparc64-*linux*) ++ LD="${LD-ld} -m elf32_sparc" ++ ;; ++ esac ++ ;; ++ *64-bit*) ++ case $host in ++ x86_64-*kfreebsd*-gnu) ++ LD="${LD-ld} -m elf_x86_64_fbsd" ++ ;; ++ x86_64-*linux*) ++ LD="${LD-ld} -m elf_x86_64" ++ ;; ++ powerpcle-*linux*) ++ LD="${LD-ld} -m elf64lppc" ++ ;; ++ powerpc-*linux*) ++ LD="${LD-ld} -m elf64ppc" ++ ;; ++ s390*-*linux*|s390*-*tpf*) ++ LD="${LD-ld} -m elf64_s390" ++ ;; ++ sparc*-*linux*) ++ LD="${LD-ld} -m elf64_sparc" ++ ;; ++ esac ++ ;; ++ esac ++ fi ++ rm -rf conftest* ++ ;; ++ ++*-*-sco3.2v5*) ++ # On SCO OpenServer 5, we need -belf to get full-featured binaries. 
++ SAVE_CFLAGS="$CFLAGS" ++ CFLAGS="$CFLAGS -belf" ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 ++$as_echo_n "checking whether the C compiler needs -belf... " >&6; } ++if test "${lt_cv_cc_needs_belf+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++ ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ lt_cv_cc_needs_belf=yes ++else ++ lt_cv_cc_needs_belf=no ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++ ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 ++$as_echo "$lt_cv_cc_needs_belf" >&6; } ++ if test x"$lt_cv_cc_needs_belf" != x"yes"; then ++ # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf ++ CFLAGS="$SAVE_CFLAGS" ++ fi ++ ;; ++sparc*-*solaris*) ++ # Find out which ABI we are using. ++ echo 'int i;' > conftest.$ac_ext ++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 ++ (eval $ac_compile) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 ++ test $ac_status = 0; }; then ++ case `/usr/bin/file conftest.o` in ++ *64-bit*) ++ case $lt_cv_prog_gnu_ld in ++ yes*) LD="${LD-ld} -m elf64_sparc" ;; ++ *) ++ if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then ++ LD="${LD-ld} -64" ++ fi ++ ;; ++ esac ++ ;; ++ esac ++ fi ++ rm -rf conftest* ++ ;; ++esac ++ ++need_locks="$enable_libtool_lock" ++ ++ ++ case $host_os in ++ rhapsody* | darwin*) ++ if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args. ++set dummy ${ac_tool_prefix}dsymutil; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_DSYMUTIL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$DSYMUTIL"; then ++ ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++DSYMUTIL=$ac_cv_prog_DSYMUTIL ++if test -n "$DSYMUTIL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5 ++$as_echo "$DSYMUTIL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_DSYMUTIL"; then ++ ac_ct_DSYMUTIL=$DSYMUTIL ++ # Extract the first word of "dsymutil", so it can be a program name with args. ++set dummy dsymutil; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... 
" >&6; } ++if test "${ac_cv_prog_ac_ct_DSYMUTIL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_DSYMUTIL"; then ++ ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL ++if test -n "$ac_ct_DSYMUTIL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5 ++$as_echo "$ac_ct_DSYMUTIL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_DSYMUTIL" = x; then ++ DSYMUTIL=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ DSYMUTIL=$ac_ct_DSYMUTIL ++ fi ++else ++ DSYMUTIL="$ac_cv_prog_DSYMUTIL" ++fi ++ ++ if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args. ++set dummy ${ac_tool_prefix}nmedit; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_NMEDIT+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$NMEDIT"; then ++ ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test. 
++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++NMEDIT=$ac_cv_prog_NMEDIT ++if test -n "$NMEDIT"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5 ++$as_echo "$NMEDIT" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_NMEDIT"; then ++ ac_ct_NMEDIT=$NMEDIT ++ # Extract the first word of "nmedit", so it can be a program name with args. ++set dummy nmedit; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_NMEDIT+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_NMEDIT"; then ++ ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_NMEDIT="nmedit" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT ++if test -n "$ac_ct_NMEDIT"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5 ++$as_echo "$ac_ct_NMEDIT" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_NMEDIT" = x; then ++ NMEDIT=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ NMEDIT=$ac_ct_NMEDIT ++ fi ++else ++ NMEDIT="$ac_cv_prog_NMEDIT" ++fi ++ ++ if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args. ++set dummy ${ac_tool_prefix}lipo; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_LIPO+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$LIPO"; then ++ ac_cv_prog_LIPO="$LIPO" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_LIPO="${ac_tool_prefix}lipo" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++LIPO=$ac_cv_prog_LIPO ++if test -n "$LIPO"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5 ++$as_echo "$LIPO" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_LIPO"; then ++ ac_ct_LIPO=$LIPO ++ # Extract the first word of "lipo", so it can be a program name with args. ++set dummy lipo; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_LIPO+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_LIPO"; then ++ ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_LIPO="lipo" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO ++if test -n "$ac_ct_LIPO"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5 ++$as_echo "$ac_ct_LIPO" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_LIPO" = x; then ++ LIPO=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ LIPO=$ac_ct_LIPO ++ fi ++else ++ LIPO="$ac_cv_prog_LIPO" ++fi ++ ++ if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args. ++set dummy ${ac_tool_prefix}otool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_OTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$OTOOL"; then ++ ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_OTOOL="${ac_tool_prefix}otool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++OTOOL=$ac_cv_prog_OTOOL ++if test -n "$OTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5 ++$as_echo "$OTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_OTOOL"; then ++ ac_ct_OTOOL=$OTOOL ++ # Extract the first word of "otool", so it can be a program name with args. ++set dummy otool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_OTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_OTOOL"; then ++ ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_OTOOL="otool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL ++if test -n "$ac_ct_OTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5 ++$as_echo "$ac_ct_OTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_OTOOL" = x; then ++ OTOOL=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ OTOOL=$ac_ct_OTOOL ++ fi ++else ++ OTOOL="$ac_cv_prog_OTOOL" ++fi ++ ++ if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args. ++set dummy ${ac_tool_prefix}otool64; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_OTOOL64+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$OTOOL64"; then ++ ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++OTOOL64=$ac_cv_prog_OTOOL64 ++if test -n "$OTOOL64"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5 ++$as_echo "$OTOOL64" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_OTOOL64"; then ++ ac_ct_OTOOL64=$OTOOL64 ++ # Extract the first word of "otool64", so it can be a program name with args. ++set dummy otool64; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_OTOOL64+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_OTOOL64"; then ++ ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_OTOOL64="otool64" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64 ++if test -n "$ac_ct_OTOOL64"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5 ++$as_echo "$ac_ct_OTOOL64" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_OTOOL64" = x; then ++ OTOOL64=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ OTOOL64=$ac_ct_OTOOL64 ++ fi ++else ++ OTOOL64="$ac_cv_prog_OTOOL64" ++fi ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5 ++$as_echo_n "checking for -single_module linker flag... " >&6; } ++if test "${lt_cv_apple_cc_single_mod+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_apple_cc_single_mod=no ++ if test -z "${LT_MULTI_MODULE}"; then ++ # By default we will add the -single_module flag. You can override ++ # by either setting the environment variable LT_MULTI_MODULE ++ # non-empty at configure time, or by adding -multi_module to the ++ # link flags. ++ rm -rf libconftest.dylib* ++ echo "int foo(void){return 1;}" > conftest.c ++ echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ ++-dynamiclib -Wl,-single_module conftest.c" >&5 ++ $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ ++ -dynamiclib -Wl,-single_module conftest.c 2>conftest.err ++ _lt_result=$? 
++ if test -f libconftest.dylib && test ! -s conftest.err && test $_lt_result = 0; then ++ lt_cv_apple_cc_single_mod=yes ++ else ++ cat conftest.err >&5 ++ fi ++ rm -rf libconftest.dylib* ++ rm -f conftest.* ++ fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5 ++$as_echo "$lt_cv_apple_cc_single_mod" >&6; } ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5 ++$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; } ++if test "${lt_cv_ld_exported_symbols_list+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_ld_exported_symbols_list=no ++ save_LDFLAGS=$LDFLAGS ++ echo "_main" > conftest.sym ++ LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ lt_cv_ld_exported_symbols_list=yes ++else ++ lt_cv_ld_exported_symbols_list=no ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++ LDFLAGS="$save_LDFLAGS" ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 ++$as_echo "$lt_cv_ld_exported_symbols_list" >&6; } ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5 ++$as_echo_n "checking for -force_load linker flag... 
" >&6; } ++if test "${lt_cv_ld_force_load+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_ld_force_load=no ++ cat > conftest.c << _LT_EOF ++int forced_loaded() { return 2;} ++_LT_EOF ++ echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5 ++ $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 ++ echo "$AR cru libconftest.a conftest.o" >&5 ++ $AR cru libconftest.a conftest.o 2>&5 ++ cat > conftest.c << _LT_EOF ++int main() { return 0;} ++_LT_EOF ++ echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5 ++ $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err ++ _lt_result=$? ++ if test -f conftest && test ! -s conftest.err && test $_lt_result = 0 && $GREP forced_load conftest 2>&1 >/dev/null; then ++ lt_cv_ld_force_load=yes ++ else ++ cat conftest.err >&5 ++ fi ++ rm -f conftest.err libconftest.a conftest conftest.c ++ rm -rf conftest.dSYM ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5 ++$as_echo "$lt_cv_ld_force_load" >&6; } ++ case $host_os in ++ rhapsody* | darwin1.[012]) ++ _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; ++ darwin1.*) ++ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; ++ darwin*) # darwin 5.x on ++ # if running on 10.5 or later, the deployment target defaults ++ # to the OS version, if on x86, and 10.4, the deployment ++ # target defaults to 10.4. Don't you love it? 
++ case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in ++ 10.0,*86*-darwin8*|10.0,*-darwin[91]*) ++ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; ++ 10.[012][,.]*) ++ _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; ++ 10.*) ++ _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; ++ esac ++ ;; ++ esac ++ if test "$lt_cv_apple_cc_single_mod" = "yes"; then ++ _lt_dar_single_mod='$single_module' ++ fi ++ if test "$lt_cv_ld_exported_symbols_list" = "yes"; then ++ _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' ++ else ++ _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' ++ fi ++ if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then ++ _lt_dsymutil='~$DSYMUTIL $lib || :' ++ else ++ _lt_dsymutil= ++ fi ++ ;; ++ esac ++ ++for ac_header in dlfcn.h ++do : ++ ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default ++" ++if test "x$ac_cv_header_dlfcn_h" = x""yes; then : ++ cat >>confdefs.h <<_ACEOF ++#define HAVE_DLFCN_H 1 ++_ACEOF ++ ++fi ++ ++done ++ ++ ++ ++ ++ ++# Set options ++ ++ ++ ++ enable_dlopen=no ++ ++ ++ enable_win32_dll=no ++ ++ ++ # Check whether --enable-shared was given. ++if test "${enable_shared+set}" = set; then : ++ enableval=$enable_shared; p=${PACKAGE-default} ++ case $enableval in ++ yes) enable_shared=yes ;; ++ no) enable_shared=no ;; ++ *) ++ enable_shared=no ++ # Look at the argument we got. We use all the common list separators. ++ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," ++ for pkg in $enableval; do ++ IFS="$lt_save_ifs" ++ if test "X$pkg" = "X$p"; then ++ enable_shared=yes ++ fi ++ done ++ IFS="$lt_save_ifs" ++ ;; ++ esac ++else ++ enable_shared=yes ++fi ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ # Check whether --enable-static was given. 
++if test "${enable_static+set}" = set; then : ++ enableval=$enable_static; p=${PACKAGE-default} ++ case $enableval in ++ yes) enable_static=yes ;; ++ no) enable_static=no ;; ++ *) ++ enable_static=no ++ # Look at the argument we got. We use all the common list separators. ++ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," ++ for pkg in $enableval; do ++ IFS="$lt_save_ifs" ++ if test "X$pkg" = "X$p"; then ++ enable_static=yes ++ fi ++ done ++ IFS="$lt_save_ifs" ++ ;; ++ esac ++else ++ enable_static=yes ++fi ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++# Check whether --with-pic was given. ++if test "${with_pic+set}" = set; then : ++ withval=$with_pic; pic_mode="$withval" ++else ++ pic_mode=default ++fi ++ ++ ++test -z "$pic_mode" && pic_mode=default ++ ++ ++ ++ ++ ++ ++ ++ # Check whether --enable-fast-install was given. ++if test "${enable_fast_install+set}" = set; then : ++ enableval=$enable_fast_install; p=${PACKAGE-default} ++ case $enableval in ++ yes) enable_fast_install=yes ;; ++ no) enable_fast_install=no ;; ++ *) ++ enable_fast_install=no ++ # Look at the argument we got. We use all the common list separators. ++ lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," ++ for pkg in $enableval; do ++ IFS="$lt_save_ifs" ++ if test "X$pkg" = "X$p"; then ++ enable_fast_install=yes ++ fi ++ done ++ IFS="$lt_save_ifs" ++ ;; ++ esac ++else ++ enable_fast_install=yes ++fi ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++# This can be used to rebuild libtool when needed ++LIBTOOL_DEPS="$ltmain" ++ ++# Always use our own libtool. ++LIBTOOL='$(SHELL) $(top_builddir)/libtool' ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++test -z "$LN_S" && LN_S="ln -s" ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++if test -n "${ZSH_VERSION+set}" ; then ++ setopt NO_GLOB_SUBST ++fi ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5 ++$as_echo_n "checking for objdir... 
" >&6; } ++if test "${lt_cv_objdir+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ rm -f .libs 2>/dev/null ++mkdir .libs 2>/dev/null ++if test -d .libs; then ++ lt_cv_objdir=.libs ++else ++ # MS-DOS does not allow filenames that begin with a dot. ++ lt_cv_objdir=_libs ++fi ++rmdir .libs 2>/dev/null ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5 ++$as_echo "$lt_cv_objdir" >&6; } ++objdir=$lt_cv_objdir ++ ++ ++ ++ ++ ++cat >>confdefs.h <<_ACEOF ++#define LT_OBJDIR "$lt_cv_objdir/" ++_ACEOF ++ ++ ++ ++ ++case $host_os in ++aix3*) ++ # AIX sometimes has problems with the GCC collect2 program. For some ++ # reason, if we set the COLLECT_NAMES environment variable, the problems ++ # vanish in a puff of smoke. ++ if test "X${COLLECT_NAMES+set}" != Xset; then ++ COLLECT_NAMES= ++ export COLLECT_NAMES ++ fi ++ ;; ++esac ++ ++# Global variables: ++ofile=libtool ++can_build_shared=yes ++ ++# All known linkers require a `.a' archive for static linking (except MSVC, ++# which needs '.lib'). ++libext=a ++ ++with_gnu_ld="$lt_cv_prog_gnu_ld" ++ ++old_CC="$CC" ++old_CFLAGS="$CFLAGS" ++ ++# Set sane defaults for various variables ++test -z "$CC" && CC=cc ++test -z "$LTCC" && LTCC=$CC ++test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS ++test -z "$LD" && LD=ld ++test -z "$ac_objext" && ac_objext=o ++ ++for cc_temp in $compiler""; do ++ case $cc_temp in ++ compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; ++ distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; ++ \-*) ;; ++ *) break;; ++ esac ++done ++cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` ++ ++ ++# Only perform the check for file, if the check method requires it ++test -z "$MAGIC_CMD" && MAGIC_CMD=file ++case $deplibs_check_method in ++file_magic*) ++ if test "$file_magic_cmd" = '$MAGIC_CMD'; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5 ++$as_echo_n "checking for ${ac_tool_prefix}file... 
" >&6; } ++if test "${lt_cv_path_MAGIC_CMD+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ case $MAGIC_CMD in ++[\\/*] | ?:[\\/]*) ++ lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. ++ ;; ++*) ++ lt_save_MAGIC_CMD="$MAGIC_CMD" ++ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR ++ ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" ++ for ac_dir in $ac_dummy; do ++ IFS="$lt_save_ifs" ++ test -z "$ac_dir" && ac_dir=. ++ if test -f $ac_dir/${ac_tool_prefix}file; then ++ lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file" ++ if test -n "$file_magic_test_file"; then ++ case $deplibs_check_method in ++ "file_magic "*) ++ file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` ++ MAGIC_CMD="$lt_cv_path_MAGIC_CMD" ++ if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | ++ $EGREP "$file_magic_regex" > /dev/null; then ++ : ++ else ++ cat <<_LT_EOF 1>&2 ++ ++*** Warning: the command libtool uses to detect shared libraries, ++*** $file_magic_cmd, produces output that libtool cannot recognize. ++*** The result is that libtool may fail to recognize shared libraries ++*** as such. This will affect the creation of libtool libraries that ++*** depend on shared libraries, but programs linked with such libtool ++*** libraries will work regardless of this problem. 
Nevertheless, you ++*** may want to report the problem to your system manager and/or to ++*** bug-libtool@gnu.org ++ ++_LT_EOF ++ fi ;; ++ esac ++ fi ++ break ++ fi ++ done ++ IFS="$lt_save_ifs" ++ MAGIC_CMD="$lt_save_MAGIC_CMD" ++ ;; ++esac ++fi ++ ++MAGIC_CMD="$lt_cv_path_MAGIC_CMD" ++if test -n "$MAGIC_CMD"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 ++$as_echo "$MAGIC_CMD" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ ++ ++ ++if test -z "$lt_cv_path_MAGIC_CMD"; then ++ if test -n "$ac_tool_prefix"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5 ++$as_echo_n "checking for file... " >&6; } ++if test "${lt_cv_path_MAGIC_CMD+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ case $MAGIC_CMD in ++[\\/*] | ?:[\\/]*) ++ lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. ++ ;; ++*) ++ lt_save_MAGIC_CMD="$MAGIC_CMD" ++ lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR ++ ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" ++ for ac_dir in $ac_dummy; do ++ IFS="$lt_save_ifs" ++ test -z "$ac_dir" && ac_dir=. ++ if test -f $ac_dir/file; then ++ lt_cv_path_MAGIC_CMD="$ac_dir/file" ++ if test -n "$file_magic_test_file"; then ++ case $deplibs_check_method in ++ "file_magic "*) ++ file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` ++ MAGIC_CMD="$lt_cv_path_MAGIC_CMD" ++ if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | ++ $EGREP "$file_magic_regex" > /dev/null; then ++ : ++ else ++ cat <<_LT_EOF 1>&2 ++ ++*** Warning: the command libtool uses to detect shared libraries, ++*** $file_magic_cmd, produces output that libtool cannot recognize. ++*** The result is that libtool may fail to recognize shared libraries ++*** as such. This will affect the creation of libtool libraries that ++*** depend on shared libraries, but programs linked with such libtool ++*** libraries will work regardless of this problem. 
Nevertheless, you ++*** may want to report the problem to your system manager and/or to ++*** bug-libtool@gnu.org ++ ++_LT_EOF ++ fi ;; ++ esac ++ fi ++ break ++ fi ++ done ++ IFS="$lt_save_ifs" ++ MAGIC_CMD="$lt_save_MAGIC_CMD" ++ ;; ++esac ++fi ++ ++MAGIC_CMD="$lt_cv_path_MAGIC_CMD" ++if test -n "$MAGIC_CMD"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 ++$as_echo "$MAGIC_CMD" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ else ++ MAGIC_CMD=: ++ fi ++fi ++ ++ fi ++ ;; ++esac ++ ++# Use C for the default configuration in the libtool script ++ ++lt_save_CC="$CC" ++ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++ ++ ++# Source file extension for C test sources. ++ac_ext=c ++ ++# Object file extension for compiled C test sources. ++objext=o ++objext=$objext ++ ++# Code to be used in simple compile tests ++lt_simple_compile_test_code="int some_variable = 0;" ++ ++# Code to be used in simple link tests ++lt_simple_link_test_code='int main(){return(0);}' ++ ++ ++ ++ ++ ++ ++ ++# If no C compiler was specified, use CC. ++LTCC=${LTCC-"$CC"} ++ ++# If no C compiler flags were specified, use CFLAGS. ++LTCFLAGS=${LTCFLAGS-"$CFLAGS"} ++ ++# Allow CC to be a program name with arguments. ++compiler=$CC ++ ++# Save the default compiler, since it gets overwritten when the other ++# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. 
++compiler_DEFAULT=$CC ++ ++# save warnings/boilerplate of simple test code ++ac_outfile=conftest.$ac_objext ++echo "$lt_simple_compile_test_code" >conftest.$ac_ext ++eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err ++_lt_compiler_boilerplate=`cat conftest.err` ++$RM conftest* ++ ++ac_outfile=conftest.$ac_objext ++echo "$lt_simple_link_test_code" >conftest.$ac_ext ++eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err ++_lt_linker_boilerplate=`cat conftest.err` ++$RM -r conftest* ++ ++ ++## CAVEAT EMPTOR: ++## There is no encapsulation within the following macros, do not change ++## the running order or otherwise move them around unless you know exactly ++## what you are doing... ++if test -n "$compiler"; then ++ ++lt_prog_compiler_no_builtin_flag= ++ ++if test "$GCC" = yes; then ++ case $cc_basename in ++ nvcc*) ++ lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;; ++ *) ++ lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;; ++ esac ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 ++$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; } ++if test "${lt_cv_prog_compiler_rtti_exceptions+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_rtti_exceptions=no ++ ac_outfile=conftest.$ac_objext ++ echo "$lt_simple_compile_test_code" > conftest.$ac_ext ++ lt_compiler_flag="-fno-rtti -fno-exceptions" ++ # Insert the option either (1) after the last *FLAGS variable, or ++ # (2) before a word containing "conftest.", or (3) at the end. ++ # Note that $ac_compile itself does not contain backslashes and begins ++ # with a dollar sign (not a hyphen), so the echo should work correctly. ++ # The option is referenced via a variable to avoid confusing sed. 
++ lt_compile=`echo "$ac_compile" | $SED \ ++ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ ++ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ ++ -e 's:$: $lt_compiler_flag:'` ++ (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) ++ (eval "$lt_compile" 2>conftest.err) ++ ac_status=$? ++ cat conftest.err >&5 ++ echo "$as_me:$LINENO: \$? = $ac_status" >&5 ++ if (exit $ac_status) && test -s "$ac_outfile"; then ++ # The compiler can only warn and ignore the option if not recognized ++ # So say no if there are warnings other than the usual output. ++ $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp ++ $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 ++ if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then ++ lt_cv_prog_compiler_rtti_exceptions=yes ++ fi ++ fi ++ $RM conftest* ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 ++$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } ++ ++if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then ++ lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" ++else ++ : ++fi ++ ++fi ++ ++ ++ ++ ++ ++ ++ lt_prog_compiler_wl= ++lt_prog_compiler_pic= ++lt_prog_compiler_static= ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 ++$as_echo_n "checking for $compiler option to produce PIC... " >&6; } ++ ++ if test "$GCC" = yes; then ++ lt_prog_compiler_wl='-Wl,' ++ lt_prog_compiler_static='-static' ++ ++ case $host_os in ++ aix*) ++ # All AIX code is PIC. 
++ if test "$host_cpu" = ia64; then ++ # AIX 5 now supports IA64 processor ++ lt_prog_compiler_static='-Bstatic' ++ fi ++ lt_prog_compiler_pic='-fPIC' ++ ;; ++ ++ amigaos*) ++ case $host_cpu in ++ powerpc) ++ # see comment about AmigaOS4 .so support ++ lt_prog_compiler_pic='-fPIC' ++ ;; ++ m68k) ++ # FIXME: we need at least 68020 code to build shared libraries, but ++ # adding the `-m68020' flag to GCC prevents building anything better, ++ # like `-m68040'. ++ lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' ++ ;; ++ esac ++ ;; ++ ++ beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) ++ # PIC is the default for these OSes. ++ ;; ++ ++ mingw* | cygwin* | pw32* | os2* | cegcc*) ++ # This hack is so that the source file can tell whether it is being ++ # built for inclusion in a dll (and should export symbols for example). ++ # Although the cygwin gcc ignores -fPIC, still need this for old-style ++ # (--disable-auto-import) libraries ++ lt_prog_compiler_pic='-DDLL_EXPORT' ++ ;; ++ ++ darwin* | rhapsody*) ++ # PIC is the default on this platform ++ # Common symbols not allowed in MH_DYLIB files ++ lt_prog_compiler_pic='-fno-common' ++ ;; ++ ++ haiku*) ++ # PIC is the default for Haiku. ++ # The "-static" flag exists, but is broken. ++ lt_prog_compiler_static= ++ ;; ++ ++ hpux*) ++ # PIC is the default for 64-bit PA HP-UX, but not for 32-bit ++ # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag ++ # sets the default TLS model and affects inlining. ++ case $host_cpu in ++ hppa*64*) ++ # +Z the default ++ ;; ++ *) ++ lt_prog_compiler_pic='-fPIC' ++ ;; ++ esac ++ ;; ++ ++ interix[3-9]*) ++ # Interix 3.x gcc -fpic/-fPIC options generate broken code. ++ # Instead, we relocate shared libraries at runtime. ++ ;; ++ ++ msdosdjgpp*) ++ # Just because we use GCC doesn't mean we suddenly get shared libraries ++ # on systems that don't support them. 
++ lt_prog_compiler_can_build_shared=no ++ enable_shared=no ++ ;; ++ ++ *nto* | *qnx*) ++ # QNX uses GNU C++, but need to define -shared option too, otherwise ++ # it will coredump. ++ lt_prog_compiler_pic='-fPIC -shared' ++ ;; ++ ++ sysv4*MP*) ++ if test -d /usr/nec; then ++ lt_prog_compiler_pic=-Kconform_pic ++ fi ++ ;; ++ ++ *) ++ lt_prog_compiler_pic='-fPIC' ++ ;; ++ esac ++ ++ case $cc_basename in ++ nvcc*) # Cuda Compiler Driver 2.2 ++ lt_prog_compiler_wl='-Xlinker ' ++ lt_prog_compiler_pic='-Xcompiler -fPIC' ++ ;; ++ esac ++ else ++ # PORTME Check for flag to pass linker flags through the system compiler. ++ case $host_os in ++ aix*) ++ lt_prog_compiler_wl='-Wl,' ++ if test "$host_cpu" = ia64; then ++ # AIX 5 now supports IA64 processor ++ lt_prog_compiler_static='-Bstatic' ++ else ++ lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp' ++ fi ++ ;; ++ ++ mingw* | cygwin* | pw32* | os2* | cegcc*) ++ # This hack is so that the source file can tell whether it is being ++ # built for inclusion in a dll (and should export symbols for example). ++ lt_prog_compiler_pic='-DDLL_EXPORT' ++ ;; ++ ++ hpux9* | hpux10* | hpux11*) ++ lt_prog_compiler_wl='-Wl,' ++ # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but ++ # not for PA HP-UX. ++ case $host_cpu in ++ hppa*64*|ia64*) ++ # +Z the default ++ ;; ++ *) ++ lt_prog_compiler_pic='+Z' ++ ;; ++ esac ++ # Is there a better lt_prog_compiler_static that works with the bundled CC? ++ lt_prog_compiler_static='${wl}-a ${wl}archive' ++ ;; ++ ++ irix5* | irix6* | nonstopux*) ++ lt_prog_compiler_wl='-Wl,' ++ # PIC (with -KPIC) is the default. ++ lt_prog_compiler_static='-non_shared' ++ ;; ++ ++ linux* | k*bsd*-gnu | kopensolaris*-gnu) ++ case $cc_basename in ++ # old Intel for x86_64 which still supported -KPIC. ++ ecc*) ++ lt_prog_compiler_wl='-Wl,' ++ lt_prog_compiler_pic='-KPIC' ++ lt_prog_compiler_static='-static' ++ ;; ++ # icc used to be incompatible with GCC. ++ # ICC 10 doesn't accept -KPIC any more. 
++ icc* | ifort*) ++ lt_prog_compiler_wl='-Wl,' ++ lt_prog_compiler_pic='-fPIC' ++ lt_prog_compiler_static='-static' ++ ;; ++ # Lahey Fortran 8.1. ++ lf95*) ++ lt_prog_compiler_wl='-Wl,' ++ lt_prog_compiler_pic='--shared' ++ lt_prog_compiler_static='--static' ++ ;; ++ pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) ++ # Portland Group compilers (*not* the Pentium gcc compiler, ++ # which looks to be a dead project) ++ lt_prog_compiler_wl='-Wl,' ++ lt_prog_compiler_pic='-fpic' ++ lt_prog_compiler_static='-Bstatic' ++ ;; ++ ccc*) ++ lt_prog_compiler_wl='-Wl,' ++ # All Alpha code is PIC. ++ lt_prog_compiler_static='-non_shared' ++ ;; ++ xl* | bgxl* | bgf* | mpixl*) ++ # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene ++ lt_prog_compiler_wl='-Wl,' ++ lt_prog_compiler_pic='-qpic' ++ lt_prog_compiler_static='-qstaticlink' ++ ;; ++ *) ++ case `$CC -V 2>&1 | sed 5q` in ++ *Sun\ F* | *Sun*Fortran*) ++ # Sun Fortran 8.3 passes all unrecognized flags to the linker ++ lt_prog_compiler_pic='-KPIC' ++ lt_prog_compiler_static='-Bstatic' ++ lt_prog_compiler_wl='' ++ ;; ++ *Sun\ C*) ++ # Sun C 5.9 ++ lt_prog_compiler_pic='-KPIC' ++ lt_prog_compiler_static='-Bstatic' ++ lt_prog_compiler_wl='-Wl,' ++ ;; ++ esac ++ ;; ++ esac ++ ;; ++ ++ newsos6) ++ lt_prog_compiler_pic='-KPIC' ++ lt_prog_compiler_static='-Bstatic' ++ ;; ++ ++ *nto* | *qnx*) ++ # QNX uses GNU C++, but need to define -shared option too, otherwise ++ # it will coredump. ++ lt_prog_compiler_pic='-fPIC -shared' ++ ;; ++ ++ osf3* | osf4* | osf5*) ++ lt_prog_compiler_wl='-Wl,' ++ # All OSF/1 code is PIC. 
++ lt_prog_compiler_static='-non_shared' ++ ;; ++ ++ rdos*) ++ lt_prog_compiler_static='-non_shared' ++ ;; ++ ++ solaris*) ++ lt_prog_compiler_pic='-KPIC' ++ lt_prog_compiler_static='-Bstatic' ++ case $cc_basename in ++ f77* | f90* | f95*) ++ lt_prog_compiler_wl='-Qoption ld ';; ++ *) ++ lt_prog_compiler_wl='-Wl,';; ++ esac ++ ;; ++ ++ sunos4*) ++ lt_prog_compiler_wl='-Qoption ld ' ++ lt_prog_compiler_pic='-PIC' ++ lt_prog_compiler_static='-Bstatic' ++ ;; ++ ++ sysv4 | sysv4.2uw2* | sysv4.3*) ++ lt_prog_compiler_wl='-Wl,' ++ lt_prog_compiler_pic='-KPIC' ++ lt_prog_compiler_static='-Bstatic' ++ ;; ++ ++ sysv4*MP*) ++ if test -d /usr/nec ;then ++ lt_prog_compiler_pic='-Kconform_pic' ++ lt_prog_compiler_static='-Bstatic' ++ fi ++ ;; ++ ++ sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) ++ lt_prog_compiler_wl='-Wl,' ++ lt_prog_compiler_pic='-KPIC' ++ lt_prog_compiler_static='-Bstatic' ++ ;; ++ ++ unicos*) ++ lt_prog_compiler_wl='-Wl,' ++ lt_prog_compiler_can_build_shared=no ++ ;; ++ ++ uts4*) ++ lt_prog_compiler_pic='-pic' ++ lt_prog_compiler_static='-Bstatic' ++ ;; ++ ++ *) ++ lt_prog_compiler_can_build_shared=no ++ ;; ++ esac ++ fi ++ ++case $host_os in ++ # For platforms which do not support PIC, -DPIC is meaningless: ++ *djgpp*) ++ lt_prog_compiler_pic= ++ ;; ++ *) ++ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" ++ ;; ++esac ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 ++$as_echo "$lt_prog_compiler_pic" >&6; } ++ ++ ++ ++ ++ ++ ++# ++# Check to make sure the PIC flag actually works. ++# ++if test -n "$lt_prog_compiler_pic"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 ++$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... 
" >&6; } ++if test "${lt_cv_prog_compiler_pic_works+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_pic_works=no ++ ac_outfile=conftest.$ac_objext ++ echo "$lt_simple_compile_test_code" > conftest.$ac_ext ++ lt_compiler_flag="$lt_prog_compiler_pic -DPIC" ++ # Insert the option either (1) after the last *FLAGS variable, or ++ # (2) before a word containing "conftest.", or (3) at the end. ++ # Note that $ac_compile itself does not contain backslashes and begins ++ # with a dollar sign (not a hyphen), so the echo should work correctly. ++ # The option is referenced via a variable to avoid confusing sed. ++ lt_compile=`echo "$ac_compile" | $SED \ ++ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ ++ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ ++ -e 's:$: $lt_compiler_flag:'` ++ (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) ++ (eval "$lt_compile" 2>conftest.err) ++ ac_status=$? ++ cat conftest.err >&5 ++ echo "$as_me:$LINENO: \$? = $ac_status" >&5 ++ if (exit $ac_status) && test -s "$ac_outfile"; then ++ # The compiler can only warn and ignore the option if not recognized ++ # So say no if there are warnings other than the usual output. ++ $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp ++ $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 ++ if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then ++ lt_cv_prog_compiler_pic_works=yes ++ fi ++ fi ++ $RM conftest* ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 ++$as_echo "$lt_cv_prog_compiler_pic_works" >&6; } ++ ++if test x"$lt_cv_prog_compiler_pic_works" = xyes; then ++ case $lt_prog_compiler_pic in ++ "" | " "*) ;; ++ *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; ++ esac ++else ++ lt_prog_compiler_pic= ++ lt_prog_compiler_can_build_shared=no ++fi ++ ++fi ++ ++ ++ ++ ++ ++ ++# ++# Check to make sure the static flag actually works. 
++# ++wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\" ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 ++$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } ++if test "${lt_cv_prog_compiler_static_works+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_static_works=no ++ save_LDFLAGS="$LDFLAGS" ++ LDFLAGS="$LDFLAGS $lt_tmp_static_flag" ++ echo "$lt_simple_link_test_code" > conftest.$ac_ext ++ if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then ++ # The linker can only warn and ignore the option if not recognized ++ # So say no if there are warnings ++ if test -s conftest.err; then ++ # Append any errors to the config.log. ++ cat conftest.err 1>&5 ++ $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp ++ $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 ++ if diff conftest.exp conftest.er2 >/dev/null; then ++ lt_cv_prog_compiler_static_works=yes ++ fi ++ else ++ lt_cv_prog_compiler_static_works=yes ++ fi ++ fi ++ $RM -r conftest* ++ LDFLAGS="$save_LDFLAGS" ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 ++$as_echo "$lt_cv_prog_compiler_static_works" >&6; } ++ ++if test x"$lt_cv_prog_compiler_static_works" = xyes; then ++ : ++else ++ lt_prog_compiler_static= ++fi ++ ++ ++ ++ ++ ++ ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 ++$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... 
" >&6; } ++if test "${lt_cv_prog_compiler_c_o+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_c_o=no ++ $RM -r conftest 2>/dev/null ++ mkdir conftest ++ cd conftest ++ mkdir out ++ echo "$lt_simple_compile_test_code" > conftest.$ac_ext ++ ++ lt_compiler_flag="-o out/conftest2.$ac_objext" ++ # Insert the option either (1) after the last *FLAGS variable, or ++ # (2) before a word containing "conftest.", or (3) at the end. ++ # Note that $ac_compile itself does not contain backslashes and begins ++ # with a dollar sign (not a hyphen), so the echo should work correctly. ++ lt_compile=`echo "$ac_compile" | $SED \ ++ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ ++ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ ++ -e 's:$: $lt_compiler_flag:'` ++ (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) ++ (eval "$lt_compile" 2>out/conftest.err) ++ ac_status=$? ++ cat out/conftest.err >&5 ++ echo "$as_me:$LINENO: \$? = $ac_status" >&5 ++ if (exit $ac_status) && test -s out/conftest2.$ac_objext ++ then ++ # The compiler can only warn and ignore the option if not recognized ++ # So say no if there are warnings ++ $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp ++ $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 ++ if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then ++ lt_cv_prog_compiler_c_o=yes ++ fi ++ fi ++ chmod u+w . 2>&5 ++ $RM conftest* ++ # SGI C++ compiler will create directory out/ii_files/ for ++ # template instantiation ++ test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files ++ $RM out/* && rmdir out ++ cd .. 
++ $RM -r conftest ++ $RM conftest* ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 ++$as_echo "$lt_cv_prog_compiler_c_o" >&6; } ++ ++ ++ ++ ++ ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 ++$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } ++if test "${lt_cv_prog_compiler_c_o+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_c_o=no ++ $RM -r conftest 2>/dev/null ++ mkdir conftest ++ cd conftest ++ mkdir out ++ echo "$lt_simple_compile_test_code" > conftest.$ac_ext ++ ++ lt_compiler_flag="-o out/conftest2.$ac_objext" ++ # Insert the option either (1) after the last *FLAGS variable, or ++ # (2) before a word containing "conftest.", or (3) at the end. ++ # Note that $ac_compile itself does not contain backslashes and begins ++ # with a dollar sign (not a hyphen), so the echo should work correctly. ++ lt_compile=`echo "$ac_compile" | $SED \ ++ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ ++ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ ++ -e 's:$: $lt_compiler_flag:'` ++ (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) ++ (eval "$lt_compile" 2>out/conftest.err) ++ ac_status=$? ++ cat out/conftest.err >&5 ++ echo "$as_me:$LINENO: \$? = $ac_status" >&5 ++ if (exit $ac_status) && test -s out/conftest2.$ac_objext ++ then ++ # The compiler can only warn and ignore the option if not recognized ++ # So say no if there are warnings ++ $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp ++ $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 ++ if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then ++ lt_cv_prog_compiler_c_o=yes ++ fi ++ fi ++ chmod u+w . 
2>&5 ++ $RM conftest* ++ # SGI C++ compiler will create directory out/ii_files/ for ++ # template instantiation ++ test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files ++ $RM out/* && rmdir out ++ cd .. ++ $RM -r conftest ++ $RM conftest* ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 ++$as_echo "$lt_cv_prog_compiler_c_o" >&6; } ++ ++ ++ ++ ++hard_links="nottested" ++if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then ++ # do not overwrite the value of need_locks provided by the user ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 ++$as_echo_n "checking if we can lock with hard links... " >&6; } ++ hard_links=yes ++ $RM conftest* ++ ln conftest.a conftest.b 2>/dev/null && hard_links=no ++ touch conftest.a ++ ln conftest.a conftest.b 2>&5 || hard_links=no ++ ln conftest.a conftest.b 2>/dev/null && hard_links=no ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 ++$as_echo "$hard_links" >&6; } ++ if test "$hard_links" = no; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 ++$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} ++ need_locks=warn ++ fi ++else ++ need_locks=no ++fi ++ ++ ++ ++ ++ ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 ++$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... 
" >&6; } ++ ++ runpath_var= ++ allow_undefined_flag= ++ always_export_symbols=no ++ archive_cmds= ++ archive_expsym_cmds= ++ compiler_needs_object=no ++ enable_shared_with_static_runtimes=no ++ export_dynamic_flag_spec= ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ++ hardcode_automatic=no ++ hardcode_direct=no ++ hardcode_direct_absolute=no ++ hardcode_libdir_flag_spec= ++ hardcode_libdir_flag_spec_ld= ++ hardcode_libdir_separator= ++ hardcode_minus_L=no ++ hardcode_shlibpath_var=unsupported ++ inherit_rpath=no ++ link_all_deplibs=unknown ++ module_cmds= ++ module_expsym_cmds= ++ old_archive_from_new_cmds= ++ old_archive_from_expsyms_cmds= ++ thread_safe_flag_spec= ++ whole_archive_flag_spec= ++ # include_expsyms should be a list of space-separated symbols to be *always* ++ # included in the symbol list ++ include_expsyms= ++ # exclude_expsyms can be an extended regexp of symbols to exclude ++ # it will be wrapped by ` (' and `)$', so one must not match beginning or ++ # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', ++ # as well as any symbol that contains `d'. ++ exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' ++ # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out ++ # platforms (ab)use it in PIC code, but their linkers get confused if ++ # the symbol is explicitly referenced. Since portable code cannot ++ # rely on this symbol name, it's probably fine to never include it in ++ # preloaded symbol tables. ++ # Exclude shared library initialization/finalization symbols. ++ extract_expsyms_cmds= ++ ++ case $host_os in ++ cygwin* | mingw* | pw32* | cegcc*) ++ # FIXME: the MSVC++ port hasn't been tested in a loooong time ++ # When not using gcc, we currently assume that we are using ++ # Microsoft Visual C++. 
++ if test "$GCC" != yes; then ++ with_gnu_ld=no ++ fi ++ ;; ++ interix*) ++ # we just hope/assume this is gcc and not c89 (= MSVC++) ++ with_gnu_ld=yes ++ ;; ++ openbsd*) ++ with_gnu_ld=no ++ ;; ++ esac ++ ++ ld_shlibs=yes ++ ++ # On some targets, GNU ld is compatible enough with the native linker ++ # that we're better off using the native interface for both. ++ lt_use_gnu_ld_interface=no ++ if test "$with_gnu_ld" = yes; then ++ case $host_os in ++ aix*) ++ # The AIX port of GNU ld has always aspired to compatibility ++ # with the native linker. However, as the warning in the GNU ld ++ # block says, versions before 2.19.5* couldn't really create working ++ # shared libraries, regardless of the interface used. ++ case `$LD -v 2>&1` in ++ *\ \(GNU\ Binutils\)\ 2.19.5*) ;; ++ *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;; ++ *\ \(GNU\ Binutils\)\ [3-9]*) ;; ++ *) ++ lt_use_gnu_ld_interface=yes ++ ;; ++ esac ++ ;; ++ *) ++ lt_use_gnu_ld_interface=yes ++ ;; ++ esac ++ fi ++ ++ if test "$lt_use_gnu_ld_interface" = yes; then ++ # If archive_cmds runs LD, not CC, wlarc should be empty ++ wlarc='${wl}' ++ ++ # Set some defaults for GNU ld with shared library support. These ++ # are reset later if shared libraries are not supported. Putting them ++ # here allows them to be overridden if necessary. ++ runpath_var=LD_RUN_PATH ++ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' ++ export_dynamic_flag_spec='${wl}--export-dynamic' ++ # ancient GNU ld didn't support --whole-archive et. al. ++ if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then ++ whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' ++ else ++ whole_archive_flag_spec= ++ fi ++ supports_anon_versioning=no ++ case `$LD -v 2>&1` in ++ *GNU\ gold*) supports_anon_versioning=yes ;; ++ *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 ++ *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... 
++ *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... ++ *\ 2.11.*) ;; # other 2.11 versions ++ *) supports_anon_versioning=yes ;; ++ esac ++ ++ # See if GNU ld supports shared libraries. ++ case $host_os in ++ aix[3-9]*) ++ # On AIX/PPC, the GNU linker is very broken ++ if test "$host_cpu" != ia64; then ++ ld_shlibs=no ++ cat <<_LT_EOF 1>&2 ++ ++*** Warning: the GNU linker, at least up to release 2.19, is reported ++*** to be unable to reliably create shared libraries on AIX. ++*** Therefore, libtool is disabling shared libraries support. If you ++*** really care for shared libraries, you may want to install binutils ++*** 2.20 or above, or modify your PATH so that a non-GNU linker is found. ++*** You will then need to restart the configuration process. ++ ++_LT_EOF ++ fi ++ ;; ++ ++ amigaos*) ++ case $host_cpu in ++ powerpc) ++ # see comment about AmigaOS4 .so support ++ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='' ++ ;; ++ m68k) ++ archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' ++ hardcode_libdir_flag_spec='-L$libdir' ++ hardcode_minus_L=yes ++ ;; ++ esac ++ ;; ++ ++ beos*) ++ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then ++ allow_undefined_flag=unsupported ++ # Joseph Beckenbach says some releases of gcc ++ # support --undefined. This deserves some investigation. 
FIXME ++ archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ else ++ ld_shlibs=no ++ fi ++ ;; ++ ++ cygwin* | mingw* | pw32* | cegcc*) ++ # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, ++ # as there is no search path for DLLs. ++ hardcode_libdir_flag_spec='-L$libdir' ++ export_dynamic_flag_spec='${wl}--export-all-symbols' ++ allow_undefined_flag=unsupported ++ always_export_symbols=no ++ enable_shared_with_static_runtimes=yes ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ ++ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then ++ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' ++ # If the export-symbols file already is a .def file (1st line ++ # is EXPORTS), use it as is; otherwise, prepend... ++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ cp $export_symbols $output_objdir/$soname.def; ++ else ++ echo EXPORTS > $output_objdir/$soname.def; ++ cat $export_symbols >> $output_objdir/$soname.def; ++ fi~ ++ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' ++ else ++ ld_shlibs=no ++ fi ++ ;; ++ ++ haiku*) ++ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ link_all_deplibs=yes ++ ;; ++ ++ interix[3-9]*) ++ hardcode_direct=no ++ hardcode_shlibpath_var=no ++ hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ++ export_dynamic_flag_spec='${wl}-E' ++ # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. 
++ # Instead, shared libraries are loaded at an image base (0x10000000 by ++ # default) and relocated if they conflict, which is a slow very memory ++ # consuming and fragmenting process. To avoid this, we pick a random, ++ # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link ++ # time. Moving up from 0x10000000 also allows more sbrk(2) space. ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ++ archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ++ ;; ++ ++ gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) ++ tmp_diet=no ++ if test "$host_os" = linux-dietlibc; then ++ case $cc_basename in ++ diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) ++ esac ++ fi ++ if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ ++ && test "$tmp_diet" = no ++ then ++ tmp_addflag=' $pic_flag' ++ tmp_sharedflag='-shared' ++ case $cc_basename,$host_cpu in ++ pgcc*) # Portland Group C compiler ++ whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' ++ tmp_addflag=' $pic_flag' ++ ;; ++ pgf77* | pgf90* | pgf95* | pgfortran*) ++ # Portland Group f77 and f90 compilers ++ whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' ++ tmp_addflag=' $pic_flag -Mnomain' ;; ++ ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 ++ tmp_addflag=' -i_dynamic' ;; ++ efc*,ia64* | 
ifort*,ia64*) # Intel Fortran compiler on ia64 ++ tmp_addflag=' -i_dynamic -nofor_main' ;; ++ ifc* | ifort*) # Intel Fortran compiler ++ tmp_addflag=' -nofor_main' ;; ++ lf95*) # Lahey Fortran 8.1 ++ whole_archive_flag_spec= ++ tmp_sharedflag='--shared' ;; ++ xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) ++ tmp_sharedflag='-qmkshrobj' ++ tmp_addflag= ;; ++ nvcc*) # Cuda Compiler Driver 2.2 ++ whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' ++ compiler_needs_object=yes ++ ;; ++ esac ++ case `$CC -V 2>&1 | sed 5q` in ++ *Sun\ C*) # Sun C 5.9 ++ whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' ++ compiler_needs_object=yes ++ tmp_sharedflag='-G' ;; ++ *Sun\ F*) # Sun Fortran 8.3 ++ tmp_sharedflag='-G' ;; ++ esac ++ archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ ++ if test "x$supports_anon_versioning" = xyes; then ++ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ ++ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ ++ echo "local: *; };" >> $output_objdir/$libname.ver~ ++ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' ++ fi ++ ++ case $cc_basename in ++ xlf* | bgf* | bgxlf* | mpixlf*) ++ # IBM XL Fortran 10.1 on PPC cannot create shared libs itself ++ whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' ++ hardcode_libdir_flag_spec= ++ hardcode_libdir_flag_spec_ld='-rpath $libdir' ++ archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname 
$soname -o $lib' ++ if test "x$supports_anon_versioning" = xyes; then ++ archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ ++ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ ++ echo "local: *; };" >> $output_objdir/$libname.ver~ ++ $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' ++ fi ++ ;; ++ esac ++ else ++ ld_shlibs=no ++ fi ++ ;; ++ ++ netbsd*) ++ if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then ++ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' ++ wlarc= ++ else ++ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ fi ++ ;; ++ ++ solaris*) ++ if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then ++ ld_shlibs=no ++ cat <<_LT_EOF 1>&2 ++ ++*** Warning: The releases 2.8.* of the GNU linker cannot reliably ++*** create shared libraries on Solaris systems. Therefore, libtool ++*** is disabling shared libraries support. We urge you to upgrade GNU ++*** binutils to release 2.9.1 or newer. Another option is to modify ++*** your PATH or compiler configuration so that the native linker is ++*** used, and then restart. 
++ ++_LT_EOF ++ elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then ++ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ else ++ ld_shlibs=no ++ fi ++ ;; ++ ++ sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) ++ case `$LD -v 2>&1` in ++ *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) ++ ld_shlibs=no ++ cat <<_LT_EOF 1>&2 ++ ++*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not ++*** reliably create shared libraries on SCO systems. Therefore, libtool ++*** is disabling shared libraries support. We urge you to upgrade GNU ++*** binutils to release 2.16.91.0.3 or newer. Another option is to modify ++*** your PATH or compiler configuration so that the native linker is ++*** used, and then restart. ++ ++_LT_EOF ++ ;; ++ *) ++ # For security reasons, it is highly recommended that you always ++ # use absolute paths for naming shared libraries, and exclude the ++ # DT_RUNPATH tag from executables and libraries. But doing so ++ # requires that you compile everything twice, which is a pain. 
++ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then ++ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' ++ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ else ++ ld_shlibs=no ++ fi ++ ;; ++ esac ++ ;; ++ ++ sunos4*) ++ archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' ++ wlarc= ++ hardcode_direct=yes ++ hardcode_shlibpath_var=no ++ ;; ++ ++ *) ++ if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then ++ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ else ++ ld_shlibs=no ++ fi ++ ;; ++ esac ++ ++ if test "$ld_shlibs" = no; then ++ runpath_var= ++ hardcode_libdir_flag_spec= ++ export_dynamic_flag_spec= ++ whole_archive_flag_spec= ++ fi ++ else ++ # PORTME fill in a description of your system's linker (not GNU ld) ++ case $host_os in ++ aix3*) ++ allow_undefined_flag=unsupported ++ always_export_symbols=yes ++ archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' ++ # Note: this linker hardcodes the directories in LIBPATH if there ++ # are no directories specified by -L. ++ hardcode_minus_L=yes ++ if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then ++ # Neither direct hardcoding nor static linking is supported with a ++ # broken collect2. ++ hardcode_direct=unsupported ++ fi ++ ;; ++ ++ aix[4-9]*) ++ if test "$host_cpu" = ia64; then ++ # On IA64, the linker does run time linking by default, so we don't ++ # have to do anything special. 
++ aix_use_runtimelinking=no ++ exp_sym_flag='-Bexport' ++ no_entry_flag="" ++ else ++ # If we're using GNU nm, then we don't want the "-C" option. ++ # -C means demangle to AIX nm, but means don't demangle with GNU nm ++ # Also, AIX nm treats weak defined symbols like other global ++ # defined symbols, whereas GNU nm marks them as "W". ++ if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then ++ export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' ++ else ++ export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "L")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' ++ fi ++ aix_use_runtimelinking=no ++ ++ # Test if we are trying to use run time linking or normal ++ # AIX style linking. If -brtl is somewhere in LDFLAGS, we ++ # need to do runtime linking. ++ case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) ++ for ld_flag in $LDFLAGS; do ++ if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then ++ aix_use_runtimelinking=yes ++ break ++ fi ++ done ++ ;; ++ esac ++ ++ exp_sym_flag='-bexport' ++ no_entry_flag='-bnoentry' ++ fi ++ ++ # When large executables or shared objects are built, AIX ld can ++ # have problems creating the table of contents. If linking a library ++ # or program results in "error TOC overflow" add -mminimal-toc to ++ # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not ++ # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. 
++ ++ archive_cmds='' ++ hardcode_direct=yes ++ hardcode_direct_absolute=yes ++ hardcode_libdir_separator=':' ++ link_all_deplibs=yes ++ file_list_spec='${wl}-f,' ++ ++ if test "$GCC" = yes; then ++ case $host_os in aix4.[012]|aix4.[012].*) ++ # We only want to do this on AIX 4.2 and lower, the check ++ # below for broken collect2 doesn't work under 4.3+ ++ collect2name=`${CC} -print-prog-name=collect2` ++ if test -f "$collect2name" && ++ strings "$collect2name" | $GREP resolve_lib_name >/dev/null ++ then ++ # We have reworked collect2 ++ : ++ else ++ # We have old collect2 ++ hardcode_direct=unsupported ++ # It fails to find uninstalled libraries when the uninstalled ++ # path is not listed in the libpath. Setting hardcode_minus_L ++ # to unsupported forces relinking ++ hardcode_minus_L=yes ++ hardcode_libdir_flag_spec='-L$libdir' ++ hardcode_libdir_separator= ++ fi ++ ;; ++ esac ++ shared_flag='-shared' ++ if test "$aix_use_runtimelinking" = yes; then ++ shared_flag="$shared_flag "'${wl}-G' ++ fi ++ else ++ # not using gcc ++ if test "$host_cpu" = ia64; then ++ # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release ++ # chokes on -Wl,-G. The following line is correct: ++ shared_flag='-G' ++ else ++ if test "$aix_use_runtimelinking" = yes; then ++ shared_flag='${wl}-G' ++ else ++ shared_flag='${wl}-bM:SRE' ++ fi ++ fi ++ fi ++ ++ export_dynamic_flag_spec='${wl}-bexpall' ++ # It seems that -bexpall does not export symbols beginning with ++ # underscore (_), so it is better to generate a list of symbols to export. ++ always_export_symbols=yes ++ if test "$aix_use_runtimelinking" = yes; then ++ # Warning - without using the other runtime loading flags (-brtl), ++ # -berok will link without error, but may produce a broken library. ++ allow_undefined_flag='-berok' ++ # Determine the default libpath from the value encoded in an ++ # empty executable. ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ ++lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\(.*\)$/\1/ ++ p ++ } ++ }' ++aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++# Check for a 64-bit object if we didn't find anything. ++if test -z "$aix_libpath"; then ++ aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++fi ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ ++ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" ++ archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" ++ else ++ if test "$host_cpu" = ia64; then ++ hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' ++ allow_undefined_flag="-z nodefs" ++ archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" ++ else ++ # Determine the default libpath from the value encoded in an ++ # empty executable. ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ ++lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\(.*\)$/\1/ ++ p ++ } ++ }' ++aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++# Check for a 64-bit object if we didn't find anything. 
++if test -z "$aix_libpath"; then ++ aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++fi ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ ++ hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" ++ # Warning - without using the other run time loading flags, ++ # -berok will link without error, but may produce a broken library. ++ no_undefined_flag=' ${wl}-bernotok' ++ allow_undefined_flag=' ${wl}-berok' ++ if test "$with_gnu_ld" = yes; then ++ # We only use this code for GNU lds that support --whole-archive. ++ whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive' ++ else ++ # Exported symbols can be pulled into shared objects from archives ++ whole_archive_flag_spec='$convenience' ++ fi ++ archive_cmds_need_lc=yes ++ # This is similar to how AIX traditionally builds its shared libraries. ++ archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' ++ fi ++ fi ++ ;; ++ ++ amigaos*) ++ case $host_cpu in ++ powerpc) ++ # see comment about AmigaOS4 .so support ++ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='' ++ ;; ++ m68k) ++ archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' ++ hardcode_libdir_flag_spec='-L$libdir' ++ hardcode_minus_L=yes ++ ;; ++ esac ++ ;; ++ ++ bsdi[45]*) ++ 
export_dynamic_flag_spec=-rdynamic ++ ;; ++ ++ cygwin* | mingw* | pw32* | cegcc*) ++ # When not using gcc, we currently assume that we are using ++ # Microsoft Visual C++. ++ # hardcode_libdir_flag_spec is actually meaningless, as there is ++ # no search path for DLLs. ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' ++ # The linker will automatically build a .lib file if we build a DLL. ++ old_archive_from_new_cmds='true' ++ # FIXME: Should let the user specify the lib program. ++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' ++ fix_srcfile_path='`cygpath -w "$srcfile"`' ++ enable_shared_with_static_runtimes=yes ++ ;; ++ ++ darwin* | rhapsody*) ++ ++ ++ archive_cmds_need_lc=no ++ hardcode_direct=no ++ hardcode_automatic=yes ++ hardcode_shlibpath_var=unsupported ++ if test "$lt_cv_ld_force_load" = "yes"; then ++ whole_archive_flag_spec='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' ++ else ++ whole_archive_flag_spec='' ++ fi ++ link_all_deplibs=yes ++ allow_undefined_flag="$_lt_dar_allow_undefined" ++ case $cc_basename in ++ ifort*) _lt_dar_can_shared=yes ;; ++ *) _lt_dar_can_shared=$GCC ;; ++ esac ++ if test "$_lt_dar_can_shared" = "yes"; then ++ output_verbose_link_cmd=func_echo_all ++ archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" ++ module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" ++ archive_expsym_cmds="sed 's,^,_,' < 
\$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" ++ module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" ++ ++ else ++ ld_shlibs=no ++ fi ++ ++ ;; ++ ++ dgux*) ++ archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' ++ hardcode_libdir_flag_spec='-L$libdir' ++ hardcode_shlibpath_var=no ++ ;; ++ ++ # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor ++ # support. Future versions do this automatically, but an explicit c++rt0.o ++ # does not break anything, and helps significantly (at the cost of a little ++ # extra space). ++ freebsd2.2*) ++ archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' ++ hardcode_libdir_flag_spec='-R$libdir' ++ hardcode_direct=yes ++ hardcode_shlibpath_var=no ++ ;; ++ ++ # Unfortunately, older versions of FreeBSD 2 do not have this feature. ++ freebsd2.*) ++ archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' ++ hardcode_direct=yes ++ hardcode_minus_L=yes ++ hardcode_shlibpath_var=no ++ ;; ++ ++ # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
++ freebsd* | dragonfly*) ++ archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' ++ hardcode_libdir_flag_spec='-R$libdir' ++ hardcode_direct=yes ++ hardcode_shlibpath_var=no ++ ;; ++ ++ hpux9*) ++ if test "$GCC" = yes; then ++ archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ else ++ archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ fi ++ hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' ++ hardcode_libdir_separator=: ++ hardcode_direct=yes ++ ++ # hardcode_minus_L: Not really in the search PATH, ++ # but as the default location of the library. ++ hardcode_minus_L=yes ++ export_dynamic_flag_spec='${wl}-E' ++ ;; ++ ++ hpux10*) ++ if test "$GCC" = yes && test "$with_gnu_ld" = no; then ++ archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ else ++ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' ++ fi ++ if test "$with_gnu_ld" = no; then ++ hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' ++ hardcode_libdir_flag_spec_ld='+b $libdir' ++ hardcode_libdir_separator=: ++ hardcode_direct=yes ++ hardcode_direct_absolute=yes ++ export_dynamic_flag_spec='${wl}-E' ++ # hardcode_minus_L: Not really in the search PATH, ++ # but as the default location of the library. 
++ hardcode_minus_L=yes ++ fi ++ ;; ++ ++ hpux11*) ++ if test "$GCC" = yes && test "$with_gnu_ld" = no; then ++ case $host_cpu in ++ hppa*64*) ++ archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ++ ;; ++ ia64*) ++ archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ++ ;; ++ *) ++ archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ ;; ++ esac ++ else ++ case $host_cpu in ++ hppa*64*) ++ archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ++ ;; ++ ia64*) ++ archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ++ ;; ++ *) ++ ++ # Older versions of the 11.00 compiler do not understand -b yet ++ # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5 ++$as_echo_n "checking if $CC understands -b... " >&6; } ++if test "${lt_cv_prog_compiler__b+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler__b=no ++ save_LDFLAGS="$LDFLAGS" ++ LDFLAGS="$LDFLAGS -b" ++ echo "$lt_simple_link_test_code" > conftest.$ac_ext ++ if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then ++ # The linker can only warn and ignore the option if not recognized ++ # So say no if there are warnings ++ if test -s conftest.err; then ++ # Append any errors to the config.log. 
++ cat conftest.err 1>&5 ++ $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp ++ $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 ++ if diff conftest.exp conftest.er2 >/dev/null; then ++ lt_cv_prog_compiler__b=yes ++ fi ++ else ++ lt_cv_prog_compiler__b=yes ++ fi ++ fi ++ $RM -r conftest* ++ LDFLAGS="$save_LDFLAGS" ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5 ++$as_echo "$lt_cv_prog_compiler__b" >&6; } ++ ++if test x"$lt_cv_prog_compiler__b" = xyes; then ++ archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++else ++ archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' ++fi ++ ++ ;; ++ esac ++ fi ++ if test "$with_gnu_ld" = no; then ++ hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' ++ hardcode_libdir_separator=: ++ ++ case $host_cpu in ++ hppa*64*|ia64*) ++ hardcode_direct=no ++ hardcode_shlibpath_var=no ++ ;; ++ *) ++ hardcode_direct=yes ++ hardcode_direct_absolute=yes ++ export_dynamic_flag_spec='${wl}-E' ++ ++ # hardcode_minus_L: Not really in the search PATH, ++ # but as the default location of the library. ++ hardcode_minus_L=yes ++ ;; ++ esac ++ fi ++ ;; ++ ++ irix5* | irix6* | nonstopux*) ++ if test "$GCC" = yes; then ++ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ # Try to use the -exported_symbol ld option, if it does not ++ # work, assume that -exports_file does not work either and ++ # implicitly export all symbols. ++ save_LDFLAGS="$LDFLAGS" ++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++int foo(void) {} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' ++ ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++ LDFLAGS="$save_LDFLAGS" ++ else ++ archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' ++ archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' ++ fi ++ archive_cmds_need_lc='no' ++ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' ++ hardcode_libdir_separator=: ++ inherit_rpath=yes ++ link_all_deplibs=yes ++ ;; ++ ++ netbsd*) ++ if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then ++ archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out ++ else ++ archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF ++ fi ++ hardcode_libdir_flag_spec='-R$libdir' ++ hardcode_direct=yes ++ hardcode_shlibpath_var=no ++ ;; ++ ++ newsos6) ++ archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' ++ hardcode_direct=yes ++ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' ++ hardcode_libdir_separator=: ++ hardcode_shlibpath_var=no ++ ;; ++ ++ *nto* | *qnx*) ++ ;; ++ ++ openbsd*) ++ if test -f /usr/libexec/ld.so; then ++ hardcode_direct=yes ++ hardcode_shlibpath_var=no ++ hardcode_direct_absolute=yes ++ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then ++ archive_cmds='$CC -shared $pic_flag -o 
$lib $libobjs $deplibs $compiler_flags' ++ archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' ++ hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ++ export_dynamic_flag_spec='${wl}-E' ++ else ++ case $host_os in ++ openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) ++ archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' ++ hardcode_libdir_flag_spec='-R$libdir' ++ ;; ++ *) ++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' ++ hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ++ ;; ++ esac ++ fi ++ else ++ ld_shlibs=no ++ fi ++ ;; ++ ++ os2*) ++ hardcode_libdir_flag_spec='-L$libdir' ++ hardcode_minus_L=yes ++ allow_undefined_flag=unsupported ++ archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' ++ old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' ++ ;; ++ ++ osf3*) ++ if test "$GCC" = yes; then ++ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' ++ archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ else ++ allow_undefined_flag=' -expect_unresolved \*' ++ archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' ++ fi ++ archive_cmds_need_lc='no' ++ 
hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' ++ hardcode_libdir_separator=: ++ ;; ++ ++ osf4* | osf5*) # as osf3* with the addition of -msym flag ++ if test "$GCC" = yes; then ++ allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' ++ archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' ++ else ++ allow_undefined_flag=' -expect_unresolved \*' ++ archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' ++ archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ ++ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' ++ ++ # Both c and cxx compiler support -rpath directly ++ hardcode_libdir_flag_spec='-rpath $libdir' ++ fi ++ archive_cmds_need_lc='no' ++ hardcode_libdir_separator=: ++ ;; ++ ++ solaris*) ++ no_undefined_flag=' -z defs' ++ if test "$GCC" = yes; then ++ wlarc='${wl}' ++ archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ ++ $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ++ else ++ case `$CC -V 2>&1` in ++ *"Compilers 5.0"*) ++ wlarc='' ++ archive_cmds='$LD 
-G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' ++ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ ++ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' ++ ;; ++ *) ++ wlarc='${wl}' ++ archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ ++ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ++ ;; ++ esac ++ fi ++ hardcode_libdir_flag_spec='-R$libdir' ++ hardcode_shlibpath_var=no ++ case $host_os in ++ solaris2.[0-5] | solaris2.[0-5].*) ;; ++ *) ++ # The compiler driver will combine and reorder linker options, ++ # but understands `-z linker_flag'. GCC discards it without `$wl', ++ # but is careful enough not to reorder. ++ # Supported since Solaris 2.6 (maybe 2.5.1?) ++ if test "$GCC" = yes; then ++ whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' ++ else ++ whole_archive_flag_spec='-z allextract$convenience -z defaultextract' ++ fi ++ ;; ++ esac ++ link_all_deplibs=yes ++ ;; ++ ++ sunos4*) ++ if test "x$host_vendor" = xsequent; then ++ # Use $CC to link under sequent, because it throws in some extra .o ++ # files that make .init and .fini sections work. 
++ archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' ++ else ++ archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' ++ fi ++ hardcode_libdir_flag_spec='-L$libdir' ++ hardcode_direct=yes ++ hardcode_minus_L=yes ++ hardcode_shlibpath_var=no ++ ;; ++ ++ sysv4) ++ case $host_vendor in ++ sni) ++ archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' ++ hardcode_direct=yes # is this really true??? ++ ;; ++ siemens) ++ ## LD is ld it makes a PLAMLIB ++ ## CC just makes a GrossModule. ++ archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags' ++ reload_cmds='$CC -r -o $output$reload_objs' ++ hardcode_direct=no ++ ;; ++ motorola) ++ archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' ++ hardcode_direct=no #Motorola manual says yes, but my tests say they lie ++ ;; ++ esac ++ runpath_var='LD_RUN_PATH' ++ hardcode_shlibpath_var=no ++ ;; ++ ++ sysv4.3*) ++ archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' ++ hardcode_shlibpath_var=no ++ export_dynamic_flag_spec='-Bexport' ++ ;; ++ ++ sysv4*MP*) ++ if test -d /usr/nec; then ++ archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' ++ hardcode_shlibpath_var=no ++ runpath_var=LD_RUN_PATH ++ hardcode_runpath_var=yes ++ ld_shlibs=yes ++ fi ++ ;; ++ ++ sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) ++ no_undefined_flag='${wl}-z,text' ++ archive_cmds_need_lc=no ++ hardcode_shlibpath_var=no ++ runpath_var='LD_RUN_PATH' ++ ++ if test "$GCC" = yes; then ++ archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ++ else ++ archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs 
$deplibs $compiler_flags' ++ fi ++ ;; ++ ++ sysv5* | sco3.2v5* | sco5v6*) ++ # Note: We can NOT use -z defs as we might desire, because we do not ++ # link with -lc, and that would cause any symbols used from libc to ++ # always be unresolved, which means just about no library would ++ # ever link correctly. If we're not using GNU ld we use -z text ++ # though, which does catch some bad symbols but isn't as heavy-handed ++ # as -z defs. ++ no_undefined_flag='${wl}-z,text' ++ allow_undefined_flag='${wl}-z,nodefs' ++ archive_cmds_need_lc=no ++ hardcode_shlibpath_var=no ++ hardcode_libdir_flag_spec='${wl}-R,$libdir' ++ hardcode_libdir_separator=':' ++ link_all_deplibs=yes ++ export_dynamic_flag_spec='${wl}-Bexport' ++ runpath_var='LD_RUN_PATH' ++ ++ if test "$GCC" = yes; then ++ archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ++ else ++ archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ++ fi ++ ;; ++ ++ uts4*) ++ archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' ++ hardcode_libdir_flag_spec='-L$libdir' ++ hardcode_shlibpath_var=no ++ ;; ++ ++ *) ++ ld_shlibs=no ++ ;; ++ esac ++ ++ if test x$host_vendor = xsni; then ++ case $host in ++ sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) ++ export_dynamic_flag_spec='${wl}-Blargedynsym' ++ ;; ++ esac ++ fi ++ fi ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 ++$as_echo "$ld_shlibs" >&6; } ++test "$ld_shlibs" = no && can_build_shared=no ++ ++with_gnu_ld=$with_gnu_ld ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++# ++# Do we need to explicitly link libc? 
++# ++case "x$archive_cmds_need_lc" in ++x|xyes) ++ # Assume -lc should be added ++ archive_cmds_need_lc=yes ++ ++ if test "$enable_shared" = yes && test "$GCC" = yes; then ++ case $archive_cmds in ++ *'~'*) ++ # FIXME: we may have to deal with multi-command sequences. ++ ;; ++ '$CC '*) ++ # Test whether the compiler implicitly links with -lc since on some ++ # systems, -lgcc has to come before -lc. If gcc already passes -lc ++ # to ld, don't add -lc before -lgcc. ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 ++$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } ++if test "${lt_cv_archive_cmds_need_lc+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ $RM conftest* ++ echo "$lt_simple_compile_test_code" > conftest.$ac_ext ++ ++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 ++ (eval $ac_compile) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } 2>conftest.err; then ++ soname=conftest ++ lib=conftest ++ libobjs=conftest.$ac_objext ++ deplibs= ++ wl=$lt_prog_compiler_wl ++ pic_flag=$lt_prog_compiler_pic ++ compiler_flags=-v ++ linker_flags=-v ++ verstring= ++ output_objdir=. ++ libname=conftest ++ lt_save_allow_undefined_flag=$allow_undefined_flag ++ allow_undefined_flag= ++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 ++ (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 ++ test $ac_status = 0; } ++ then ++ lt_cv_archive_cmds_need_lc=no ++ else ++ lt_cv_archive_cmds_need_lc=yes ++ fi ++ allow_undefined_flag=$lt_save_allow_undefined_flag ++ else ++ cat conftest.err 1>&5 ++ fi ++ $RM conftest* ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5 ++$as_echo "$lt_cv_archive_cmds_need_lc" >&6; } ++ archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc ++ ;; ++ esac ++ fi ++ ;; ++esac ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 ++$as_echo_n "checking dynamic linker characteristics... " >&6; } ++ ++if test "$GCC" = yes; then ++ case $host_os in ++ darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; ++ *) lt_awk_arg="/^libraries:/" ;; ++ esac ++ case $host_os in ++ mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;; ++ *) lt_sed_strip_eq="s,=/,/,g" ;; ++ esac ++ lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` ++ case $lt_search_path_spec in ++ *\;*) ++ # if the path contains ";" then we assume it to be the separator ++ # otherwise default to the standard path separator (i.e. ":") - it is ++ # assumed that no part of a normal pathname contains ";" but that should ++ # okay in the real world where ";" in dirpaths is itself problematic. 
++ lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'` ++ ;; ++ *) ++ lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"` ++ ;; ++ esac ++ # Ok, now we have the path, separated by spaces, we can step through it ++ # and add multilib dir if necessary. ++ lt_tmp_lt_search_path_spec= ++ lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` ++ for lt_sys_path in $lt_search_path_spec; do ++ if test -d "$lt_sys_path/$lt_multi_os_dir"; then ++ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" ++ else ++ test -d "$lt_sys_path" && \ ++ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" ++ fi ++ done ++ lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' ++BEGIN {RS=" "; FS="/|\n";} { ++ lt_foo=""; ++ lt_count=0; ++ for (lt_i = NF; lt_i > 0; lt_i--) { ++ if ($lt_i != "" && $lt_i != ".") { ++ if ($lt_i == "..") { ++ lt_count++; ++ } else { ++ if (lt_count == 0) { ++ lt_foo="/" $lt_i lt_foo; ++ } else { ++ lt_count--; ++ } ++ } ++ } ++ } ++ if (lt_foo != "") { lt_freq[lt_foo]++; } ++ if (lt_freq[lt_foo] == 1) { print lt_foo; } ++}'` ++ # AWK program above erroneously prepends '/' to C:/dos/paths ++ # for these hosts. 
++ case $host_os in ++ mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ ++ $SED 's,/\([A-Za-z]:\),\1,g'` ;; ++ esac ++ sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` ++else ++ sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" ++fi ++library_names_spec= ++libname_spec='lib$name' ++soname_spec= ++shrext_cmds=".so" ++postinstall_cmds= ++postuninstall_cmds= ++finish_cmds= ++finish_eval= ++shlibpath_var= ++shlibpath_overrides_runpath=unknown ++version_type=none ++dynamic_linker="$host_os ld.so" ++sys_lib_dlsearch_path_spec="/lib /usr/lib" ++need_lib_prefix=unknown ++hardcode_into_libs=no ++ ++# when you set need_version to no, make sure it does not cause -set_version ++# flags to be left without arguments ++need_version=unknown ++ ++case $host_os in ++aix3*) ++ version_type=linux ++ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' ++ shlibpath_var=LIBPATH ++ ++ # AIX 3 has no versioning support, so we append a major version to the name. ++ soname_spec='${libname}${release}${shared_ext}$major' ++ ;; ++ ++aix[4-9]*) ++ version_type=linux ++ need_lib_prefix=no ++ need_version=no ++ hardcode_into_libs=yes ++ if test "$host_cpu" = ia64; then ++ # AIX 5 supports IA64 ++ library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' ++ shlibpath_var=LD_LIBRARY_PATH ++ else ++ # With GCC up to 2.95.x, collect2 would create an import file ++ # for dependence libraries. The import file would start with ++ # the line `#! .'. This would cause the generated library to ++ # depend on `.', always an invalid library. This was fixed in ++ # development snapshots of GCC prior to 3.0. 
++ case $host_os in ++ aix4 | aix4.[01] | aix4.[01].*) ++ if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' ++ echo ' yes ' ++ echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then ++ : ++ else ++ can_build_shared=no ++ fi ++ ;; ++ esac ++ # AIX (on Power*) has no versioning support, so currently we can not hardcode correct ++ # soname into executable. Probably we can add versioning support to ++ # collect2, so additional links can be useful in future. ++ if test "$aix_use_runtimelinking" = yes; then ++ # If using run time linking (on AIX 4.2 or later) use lib.so ++ # instead of lib.a to let people know that these are not ++ # typical AIX shared libraries. ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ else ++ # We preserve .a as extension for shared libraries through AIX4.2 ++ # and later when we are not doing run time linking. ++ library_names_spec='${libname}${release}.a $libname.a' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ fi ++ shlibpath_var=LIBPATH ++ fi ++ ;; ++ ++amigaos*) ++ case $host_cpu in ++ powerpc) ++ # Since July 2007 AmigaOS4 officially supports .so libraries. ++ # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ ;; ++ m68k) ++ library_names_spec='$libname.ixlibrary $libname.a' ++ # Create ${libname}_ixlibrary.a entries in /sys/libs. 
++ finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ++ ;; ++ esac ++ ;; ++ ++beos*) ++ library_names_spec='${libname}${shared_ext}' ++ dynamic_linker="$host_os ld.so" ++ shlibpath_var=LIBRARY_PATH ++ ;; ++ ++bsdi[45]*) ++ version_type=linux ++ need_version=no ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' ++ shlibpath_var=LD_LIBRARY_PATH ++ sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" ++ sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" ++ # the default ld.so.conf also contains /usr/contrib/lib and ++ # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow ++ # libtool to hard-code these into programs ++ ;; ++ ++cygwin* | mingw* | pw32* | cegcc*) ++ version_type=windows ++ shrext_cmds=".dll" ++ need_version=no ++ need_lib_prefix=no ++ ++ case $GCC,$host_os in ++ yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) ++ library_names_spec='$libname.dll.a' ++ # DLL is installed to $(libdir)/../bin by postinstall_cmds ++ postinstall_cmds='base_file=`basename \${file}`~ ++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ ++ dldir=$destdir/`dirname \$dlpath`~ ++ test -d \$dldir || mkdir -p \$dldir~ ++ $install_prog $dir/$dlname \$dldir/$dlname~ ++ chmod a+x \$dldir/$dlname~ ++ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then ++ eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; ++ fi' ++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ ++ dlpath=$dir/\$dldll~ ++ $RM \$dlpath' ++ shlibpath_overrides_runpath=yes ++ ++ case $host_os in ++ cygwin*) ++ # Cygwin DLLs use 'cyg' prefix rather than 'lib' ++ soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ++ ++ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api" ++ ;; ++ mingw* | cegcc*) ++ # MinGW DLLs use traditional 'lib' prefix ++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ++ ;; ++ pw32*) ++ # pw32 DLLs use 'pw' prefix rather than 'lib' ++ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ++ ;; ++ esac ++ ;; ++ ++ *) ++ library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ++ ;; ++ esac ++ dynamic_linker='Win32 ld.exe' ++ # FIXME: first we should search . and the directory the executable is in ++ shlibpath_var=PATH ++ ;; ++ ++darwin* | rhapsody*) ++ dynamic_linker="$host_os dyld" ++ version_type=darwin ++ need_lib_prefix=no ++ need_version=no ++ library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' ++ soname_spec='${libname}${release}${major}$shared_ext' ++ shlibpath_overrides_runpath=yes ++ shlibpath_var=DYLD_LIBRARY_PATH ++ shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' ++ ++ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib" ++ sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ++ ;; ++ ++dgux*) ++ version_type=linux ++ need_lib_prefix=no ++ need_version=no ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ shlibpath_var=LD_LIBRARY_PATH ++ ;; ++ ++freebsd* | dragonfly*) ++ # DragonFly does not have aout. 
When/if they implement a new ++ # versioning mechanism, adjust this. ++ if test -x /usr/bin/objformat; then ++ objformat=`/usr/bin/objformat` ++ else ++ case $host_os in ++ freebsd[23].*) objformat=aout ;; ++ *) objformat=elf ;; ++ esac ++ fi ++ version_type=freebsd-$objformat ++ case $version_type in ++ freebsd-elf*) ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' ++ need_version=no ++ need_lib_prefix=no ++ ;; ++ freebsd-*) ++ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' ++ need_version=yes ++ ;; ++ esac ++ shlibpath_var=LD_LIBRARY_PATH ++ case $host_os in ++ freebsd2.*) ++ shlibpath_overrides_runpath=yes ++ ;; ++ freebsd3.[01]* | freebsdelf3.[01]*) ++ shlibpath_overrides_runpath=yes ++ hardcode_into_libs=yes ++ ;; ++ freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ ++ freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) ++ shlibpath_overrides_runpath=no ++ hardcode_into_libs=yes ++ ;; ++ *) # from 4.6 on, and DragonFly ++ shlibpath_overrides_runpath=yes ++ hardcode_into_libs=yes ++ ;; ++ esac ++ ;; ++ ++gnu*) ++ version_type=linux ++ need_lib_prefix=no ++ need_version=no ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ shlibpath_var=LD_LIBRARY_PATH ++ hardcode_into_libs=yes ++ ;; ++ ++haiku*) ++ version_type=linux ++ need_lib_prefix=no ++ need_version=no ++ dynamic_linker="$host_os runtime_loader" ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ shlibpath_var=LIBRARY_PATH ++ shlibpath_overrides_runpath=yes ++ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' ++ hardcode_into_libs=yes ++ ;; ++ ++hpux9* | 
hpux10* | hpux11*) ++ # Give a soname corresponding to the major version so that dld.sl refuses to ++ # link against other versions. ++ version_type=sunos ++ need_lib_prefix=no ++ need_version=no ++ case $host_cpu in ++ ia64*) ++ shrext_cmds='.so' ++ hardcode_into_libs=yes ++ dynamic_linker="$host_os dld.so" ++ shlibpath_var=LD_LIBRARY_PATH ++ shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ if test "X$HPUX_IA64_MODE" = X32; then ++ sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" ++ else ++ sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" ++ fi ++ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ++ ;; ++ hppa*64*) ++ shrext_cmds='.sl' ++ hardcode_into_libs=yes ++ dynamic_linker="$host_os dld.sl" ++ shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH ++ shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" ++ sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ++ ;; ++ *) ++ shrext_cmds='.sl' ++ dynamic_linker="$host_os dld.sl" ++ shlibpath_var=SHLIB_PATH ++ shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ ;; ++ esac ++ # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
++ postinstall_cmds='chmod 555 $lib' ++ # or fails outright, so override atomically: ++ install_override_mode=555 ++ ;; ++ ++interix[3-9]*) ++ version_type=linux ++ need_lib_prefix=no ++ need_version=no ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' ++ shlibpath_var=LD_LIBRARY_PATH ++ shlibpath_overrides_runpath=no ++ hardcode_into_libs=yes ++ ;; ++ ++irix5* | irix6* | nonstopux*) ++ case $host_os in ++ nonstopux*) version_type=nonstopux ;; ++ *) ++ if test "$lt_cv_prog_gnu_ld" = yes; then ++ version_type=linux ++ else ++ version_type=irix ++ fi ;; ++ esac ++ need_lib_prefix=no ++ need_version=no ++ soname_spec='${libname}${release}${shared_ext}$major' ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' ++ case $host_os in ++ irix5* | nonstopux*) ++ libsuff= shlibsuff= ++ ;; ++ *) ++ case $LD in # libtool.m4 will add one of these switches to LD ++ *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") ++ libsuff= shlibsuff= libmagic=32-bit;; ++ *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") ++ libsuff=32 shlibsuff=N32 libmagic=N32;; ++ *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") ++ libsuff=64 shlibsuff=64 libmagic=64-bit;; ++ *) libsuff= shlibsuff= libmagic=never-match;; ++ esac ++ ;; ++ esac ++ shlibpath_var=LD_LIBRARY${shlibsuff}_PATH ++ shlibpath_overrides_runpath=no ++ sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" ++ sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" ++ hardcode_into_libs=yes ++ ;; ++ ++# No shared lib support for Linux oldld, aout, or coff. ++linux*oldld* | linux*aout* | linux*coff*) ++ dynamic_linker=no ++ ;; ++ ++# This must be Linux ELF. 
++linux* | k*bsd*-gnu | kopensolaris*-gnu) ++ version_type=linux ++ need_lib_prefix=no ++ need_version=no ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' ++ shlibpath_var=LD_LIBRARY_PATH ++ shlibpath_overrides_runpath=no ++ ++ # Some binutils ld are patched to set DT_RUNPATH ++ if test "${lt_cv_shlibpath_overrides_runpath+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_shlibpath_overrides_runpath=no ++ save_LDFLAGS=$LDFLAGS ++ save_libdir=$libdir ++ eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \ ++ LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\"" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : ++ lt_cv_shlibpath_overrides_runpath=yes ++fi ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++ LDFLAGS=$save_LDFLAGS ++ libdir=$save_libdir ++ ++fi ++ ++ shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath ++ ++ # This implies no fast_install, which is unacceptable. ++ # Some rework will be needed to allow for fast_install ++ # before this can be enabled. 
++ hardcode_into_libs=yes ++ ++ # Append ld.so.conf contents to the search path ++ if test -f /etc/ld.so.conf; then ++ lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` ++ sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" ++ fi ++ ++ # We used to test for /lib/ld.so.1 and disable shared libraries on ++ # powerpc, because MkLinux only supported shared libraries with the ++ # GNU dynamic linker. Since this was broken with cross compilers, ++ # most powerpc-linux boxes support dynamic linking these days and ++ # people can always --disable-shared, the test was removed, and we ++ # assume the GNU/Linux dynamic linker is in use. ++ dynamic_linker='GNU/Linux ld.so' ++ ;; ++ ++netbsd*) ++ version_type=sunos ++ need_lib_prefix=no ++ need_version=no ++ if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' ++ finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' ++ dynamic_linker='NetBSD (a.out) ld.so' ++ else ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ dynamic_linker='NetBSD ld.elf_so' ++ fi ++ shlibpath_var=LD_LIBRARY_PATH ++ shlibpath_overrides_runpath=yes ++ hardcode_into_libs=yes ++ ;; ++ ++newsos6) ++ version_type=linux ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ shlibpath_var=LD_LIBRARY_PATH ++ shlibpath_overrides_runpath=yes ++ ;; ++ ++*nto* | *qnx*) ++ version_type=qnx ++ need_lib_prefix=no ++ need_version=no ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major 
$libname${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ shlibpath_var=LD_LIBRARY_PATH ++ shlibpath_overrides_runpath=no ++ hardcode_into_libs=yes ++ dynamic_linker='ldqnx.so' ++ ;; ++ ++openbsd*) ++ version_type=sunos ++ sys_lib_dlsearch_path_spec="/usr/lib" ++ need_lib_prefix=no ++ # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. ++ case $host_os in ++ openbsd3.3 | openbsd3.3.*) need_version=yes ;; ++ *) need_version=no ;; ++ esac ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' ++ finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' ++ shlibpath_var=LD_LIBRARY_PATH ++ if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then ++ case $host_os in ++ openbsd2.[89] | openbsd2.[89].*) ++ shlibpath_overrides_runpath=no ++ ;; ++ *) ++ shlibpath_overrides_runpath=yes ++ ;; ++ esac ++ else ++ shlibpath_overrides_runpath=yes ++ fi ++ ;; ++ ++os2*) ++ libname_spec='$name' ++ shrext_cmds=".dll" ++ need_lib_prefix=no ++ library_names_spec='$libname${shared_ext} $libname.a' ++ dynamic_linker='OS/2 ld.exe' ++ shlibpath_var=LIBPATH ++ ;; ++ ++osf3* | osf4* | osf5*) ++ version_type=osf ++ need_lib_prefix=no ++ need_version=no ++ soname_spec='${libname}${release}${shared_ext}$major' ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ shlibpath_var=LD_LIBRARY_PATH ++ sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" ++ sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" ++ ;; ++ ++rdos*) ++ dynamic_linker=no ++ ;; ++ ++solaris*) ++ version_type=linux ++ need_lib_prefix=no ++ need_version=no ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ 
shlibpath_var=LD_LIBRARY_PATH ++ shlibpath_overrides_runpath=yes ++ hardcode_into_libs=yes ++ # ldd complains unless libraries are executable ++ postinstall_cmds='chmod +x $lib' ++ ;; ++ ++sunos4*) ++ version_type=sunos ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' ++ finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' ++ shlibpath_var=LD_LIBRARY_PATH ++ shlibpath_overrides_runpath=yes ++ if test "$with_gnu_ld" = yes; then ++ need_lib_prefix=no ++ fi ++ need_version=yes ++ ;; ++ ++sysv4 | sysv4.3*) ++ version_type=linux ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ shlibpath_var=LD_LIBRARY_PATH ++ case $host_vendor in ++ sni) ++ shlibpath_overrides_runpath=no ++ need_lib_prefix=no ++ runpath_var=LD_RUN_PATH ++ ;; ++ siemens) ++ need_lib_prefix=no ++ ;; ++ motorola) ++ need_lib_prefix=no ++ need_version=no ++ shlibpath_overrides_runpath=no ++ sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ++ ;; ++ esac ++ ;; ++ ++sysv4*MP*) ++ if test -d /usr/nec ;then ++ version_type=linux ++ library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' ++ soname_spec='$libname${shared_ext}.$major' ++ shlibpath_var=LD_LIBRARY_PATH ++ fi ++ ;; ++ ++sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) ++ version_type=freebsd-elf ++ need_lib_prefix=no ++ need_version=no ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ shlibpath_var=LD_LIBRARY_PATH ++ shlibpath_overrides_runpath=yes ++ hardcode_into_libs=yes ++ if test "$with_gnu_ld" = yes; then ++ sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' ++ else ++ sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' ++ 
case $host_os in ++ sco3.2v5*) ++ sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ++ ;; ++ esac ++ fi ++ sys_lib_dlsearch_path_spec='/usr/lib' ++ ;; ++ ++tpf*) ++ # TPF is a cross-target only. Preferred cross-host = GNU/Linux. ++ version_type=linux ++ need_lib_prefix=no ++ need_version=no ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ shlibpath_var=LD_LIBRARY_PATH ++ shlibpath_overrides_runpath=no ++ hardcode_into_libs=yes ++ ;; ++ ++uts4*) ++ version_type=linux ++ library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ++ soname_spec='${libname}${release}${shared_ext}$major' ++ shlibpath_var=LD_LIBRARY_PATH ++ ;; ++ ++*) ++ dynamic_linker=no ++ ;; ++esac ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 ++$as_echo "$dynamic_linker" >&6; } ++test "$dynamic_linker" = no && can_build_shared=no ++ ++variables_saved_for_relink="PATH $shlibpath_var $runpath_var" ++if test "$GCC" = yes; then ++ variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" ++fi ++ ++if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then ++ sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" ++fi ++if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then ++ sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" ++fi ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 ++$as_echo_n "checking how to hardcode library paths into programs... 
" >&6; } ++hardcode_action= ++if test -n "$hardcode_libdir_flag_spec" || ++ test -n "$runpath_var" || ++ test "X$hardcode_automatic" = "Xyes" ; then ++ ++ # We can hardcode non-existent directories. ++ if test "$hardcode_direct" != no && ++ # If the only mechanism to avoid hardcoding is shlibpath_var, we ++ # have to relink, otherwise we might link with an installed library ++ # when we should be linking with a yet-to-be-installed one ++ ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no && ++ test "$hardcode_minus_L" != no; then ++ # Linking always hardcodes the temporary library directory. ++ hardcode_action=relink ++ else ++ # We can link without hardcoding, and we can hardcode nonexisting dirs. ++ hardcode_action=immediate ++ fi ++else ++ # We cannot hardcode anything, or else we can only hardcode existing ++ # directories. ++ hardcode_action=unsupported ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 ++$as_echo "$hardcode_action" >&6; } ++ ++if test "$hardcode_action" = relink || ++ test "$inherit_rpath" = yes; then ++ # Fast installation is not supported ++ enable_fast_install=no ++elif test "$shlibpath_overrides_runpath" = yes || ++ test "$enable_shared" = no; then ++ # Fast installation is not necessary ++ enable_fast_install=needless ++fi ++ ++ ++ ++ ++ ++ ++ if test "x$enable_dlopen" != xyes; then ++ enable_dlopen=unknown ++ enable_dlopen_self=unknown ++ enable_dlopen_self_static=unknown ++else ++ lt_cv_dlopen=no ++ lt_cv_dlopen_libs= ++ ++ case $host_os in ++ beos*) ++ lt_cv_dlopen="load_add_on" ++ lt_cv_dlopen_libs= ++ lt_cv_dlopen_self=yes ++ ;; ++ ++ mingw* | pw32* | cegcc*) ++ lt_cv_dlopen="LoadLibrary" ++ lt_cv_dlopen_libs= ++ ;; ++ ++ cygwin*) ++ lt_cv_dlopen="dlopen" ++ lt_cv_dlopen_libs= ++ ;; ++ ++ darwin*) ++ # if libdl is installed we need to link against it ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 ++$as_echo_n "checking for dlopen in -ldl... 
" >&6; } ++if test "${ac_cv_lib_dl_dlopen+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_check_lib_save_LIBS=$LIBS ++LIBS="-ldl $LIBS" ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++/* Override any GCC internal prototype to avoid an error. ++ Use char because int might match the return type of a GCC ++ builtin and then its argument prototype would still apply. */ ++#ifdef __cplusplus ++extern "C" ++#endif ++char dlopen (); ++int ++main () ++{ ++return dlopen (); ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ ac_cv_lib_dl_dlopen=yes ++else ++ ac_cv_lib_dl_dlopen=no ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++LIBS=$ac_check_lib_save_LIBS ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 ++$as_echo "$ac_cv_lib_dl_dlopen" >&6; } ++if test "x$ac_cv_lib_dl_dlopen" = x""yes; then : ++ lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" ++else ++ ++ lt_cv_dlopen="dyld" ++ lt_cv_dlopen_libs= ++ lt_cv_dlopen_self=yes ++ ++fi ++ ++ ;; ++ ++ *) ++ ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" ++if test "x$ac_cv_func_shl_load" = x""yes; then : ++ lt_cv_dlopen="shl_load" ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 ++$as_echo_n "checking for shl_load in -ldld... " >&6; } ++if test "${ac_cv_lib_dld_shl_load+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_check_lib_save_LIBS=$LIBS ++LIBS="-ldld $LIBS" ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++/* Override any GCC internal prototype to avoid an error. ++ Use char because int might match the return type of a GCC ++ builtin and then its argument prototype would still apply. 
*/ ++#ifdef __cplusplus ++extern "C" ++#endif ++char shl_load (); ++int ++main () ++{ ++return shl_load (); ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ ac_cv_lib_dld_shl_load=yes ++else ++ ac_cv_lib_dld_shl_load=no ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++LIBS=$ac_check_lib_save_LIBS ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 ++$as_echo "$ac_cv_lib_dld_shl_load" >&6; } ++if test "x$ac_cv_lib_dld_shl_load" = x""yes; then : ++ lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld" ++else ++ ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" ++if test "x$ac_cv_func_dlopen" = x""yes; then : ++ lt_cv_dlopen="dlopen" ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 ++$as_echo_n "checking for dlopen in -ldl... " >&6; } ++if test "${ac_cv_lib_dl_dlopen+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_check_lib_save_LIBS=$LIBS ++LIBS="-ldl $LIBS" ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++/* Override any GCC internal prototype to avoid an error. ++ Use char because int might match the return type of a GCC ++ builtin and then its argument prototype would still apply. */ ++#ifdef __cplusplus ++extern "C" ++#endif ++char dlopen (); ++int ++main () ++{ ++return dlopen (); ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ ac_cv_lib_dl_dlopen=yes ++else ++ ac_cv_lib_dl_dlopen=no ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++LIBS=$ac_check_lib_save_LIBS ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 ++$as_echo "$ac_cv_lib_dl_dlopen" >&6; } ++if test "x$ac_cv_lib_dl_dlopen" = x""yes; then : ++ lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 ++$as_echo_n "checking for dlopen in -lsvld... 
" >&6; } ++if test "${ac_cv_lib_svld_dlopen+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_check_lib_save_LIBS=$LIBS ++LIBS="-lsvld $LIBS" ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++/* Override any GCC internal prototype to avoid an error. ++ Use char because int might match the return type of a GCC ++ builtin and then its argument prototype would still apply. */ ++#ifdef __cplusplus ++extern "C" ++#endif ++char dlopen (); ++int ++main () ++{ ++return dlopen (); ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ ac_cv_lib_svld_dlopen=yes ++else ++ ac_cv_lib_svld_dlopen=no ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++LIBS=$ac_check_lib_save_LIBS ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 ++$as_echo "$ac_cv_lib_svld_dlopen" >&6; } ++if test "x$ac_cv_lib_svld_dlopen" = x""yes; then : ++ lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 ++$as_echo_n "checking for dld_link in -ldld... " >&6; } ++if test "${ac_cv_lib_dld_dld_link+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ac_check_lib_save_LIBS=$LIBS ++LIBS="-ldld $LIBS" ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++/* Override any GCC internal prototype to avoid an error. ++ Use char because int might match the return type of a GCC ++ builtin and then its argument prototype would still apply. 
*/ ++#ifdef __cplusplus ++extern "C" ++#endif ++char dld_link (); ++int ++main () ++{ ++return dld_link (); ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ ac_cv_lib_dld_dld_link=yes ++else ++ ac_cv_lib_dld_dld_link=no ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++LIBS=$ac_check_lib_save_LIBS ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 ++$as_echo "$ac_cv_lib_dld_dld_link" >&6; } ++if test "x$ac_cv_lib_dld_dld_link" = x""yes; then : ++ lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld" ++fi ++ ++ ++fi ++ ++ ++fi ++ ++ ++fi ++ ++ ++fi ++ ++ ++fi ++ ++ ;; ++ esac ++ ++ if test "x$lt_cv_dlopen" != xno; then ++ enable_dlopen=yes ++ else ++ enable_dlopen=no ++ fi ++ ++ case $lt_cv_dlopen in ++ dlopen) ++ save_CPPFLAGS="$CPPFLAGS" ++ test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" ++ ++ save_LDFLAGS="$LDFLAGS" ++ wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" ++ ++ save_LIBS="$LIBS" ++ LIBS="$lt_cv_dlopen_libs $LIBS" ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 ++$as_echo_n "checking whether a program can dlopen itself... " >&6; } ++if test "${lt_cv_dlopen_self+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test "$cross_compiling" = yes; then : ++ lt_cv_dlopen_self=cross ++else ++ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 ++ lt_status=$lt_dlunknown ++ cat > conftest.$ac_ext <<_LT_EOF ++#line 11134 "configure" ++#include "confdefs.h" ++ ++#if HAVE_DLFCN_H ++#include ++#endif ++ ++#include ++ ++#ifdef RTLD_GLOBAL ++# define LT_DLGLOBAL RTLD_GLOBAL ++#else ++# ifdef DL_GLOBAL ++# define LT_DLGLOBAL DL_GLOBAL ++# else ++# define LT_DLGLOBAL 0 ++# endif ++#endif ++ ++/* We may have to define LT_DLLAZY_OR_NOW in the command line if we ++ find out it does not work in some platform. 
*/ ++#ifndef LT_DLLAZY_OR_NOW ++# ifdef RTLD_LAZY ++# define LT_DLLAZY_OR_NOW RTLD_LAZY ++# else ++# ifdef DL_LAZY ++# define LT_DLLAZY_OR_NOW DL_LAZY ++# else ++# ifdef RTLD_NOW ++# define LT_DLLAZY_OR_NOW RTLD_NOW ++# else ++# ifdef DL_NOW ++# define LT_DLLAZY_OR_NOW DL_NOW ++# else ++# define LT_DLLAZY_OR_NOW 0 ++# endif ++# endif ++# endif ++# endif ++#endif ++ ++/* When -fvisbility=hidden is used, assume the code has been annotated ++ correspondingly for the symbols needed. */ ++#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) ++void fnord () __attribute__((visibility("default"))); ++#endif ++ ++void fnord () { int i=42; } ++int main () ++{ ++ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); ++ int status = $lt_dlunknown; ++ ++ if (self) ++ { ++ if (dlsym (self,"fnord")) status = $lt_dlno_uscore; ++ else ++ { ++ if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; ++ else puts (dlerror ()); ++ } ++ /* dlclose (self); */ ++ } ++ else ++ puts (dlerror ()); ++ ++ return status; ++} ++_LT_EOF ++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 ++ (eval $ac_link) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then ++ (./conftest; exit; ) >&5 2>/dev/null ++ lt_status=$? 
++ case x$lt_status in ++ x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; ++ x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; ++ x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;; ++ esac ++ else : ++ # compilation failed ++ lt_cv_dlopen_self=no ++ fi ++fi ++rm -fr conftest* ++ ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 ++$as_echo "$lt_cv_dlopen_self" >&6; } ++ ++ if test "x$lt_cv_dlopen_self" = xyes; then ++ wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 ++$as_echo_n "checking whether a statically linked program can dlopen itself... " >&6; } ++if test "${lt_cv_dlopen_self_static+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test "$cross_compiling" = yes; then : ++ lt_cv_dlopen_self_static=cross ++else ++ lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 ++ lt_status=$lt_dlunknown ++ cat > conftest.$ac_ext <<_LT_EOF ++#line 11240 "configure" ++#include "confdefs.h" ++ ++#if HAVE_DLFCN_H ++#include ++#endif ++ ++#include ++ ++#ifdef RTLD_GLOBAL ++# define LT_DLGLOBAL RTLD_GLOBAL ++#else ++# ifdef DL_GLOBAL ++# define LT_DLGLOBAL DL_GLOBAL ++# else ++# define LT_DLGLOBAL 0 ++# endif ++#endif ++ ++/* We may have to define LT_DLLAZY_OR_NOW in the command line if we ++ find out it does not work in some platform. */ ++#ifndef LT_DLLAZY_OR_NOW ++# ifdef RTLD_LAZY ++# define LT_DLLAZY_OR_NOW RTLD_LAZY ++# else ++# ifdef DL_LAZY ++# define LT_DLLAZY_OR_NOW DL_LAZY ++# else ++# ifdef RTLD_NOW ++# define LT_DLLAZY_OR_NOW RTLD_NOW ++# else ++# ifdef DL_NOW ++# define LT_DLLAZY_OR_NOW DL_NOW ++# else ++# define LT_DLLAZY_OR_NOW 0 ++# endif ++# endif ++# endif ++# endif ++#endif ++ ++/* When -fvisbility=hidden is used, assume the code has been annotated ++ correspondingly for the symbols needed. 
*/ ++#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) ++void fnord () __attribute__((visibility("default"))); ++#endif ++ ++void fnord () { int i=42; } ++int main () ++{ ++ void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); ++ int status = $lt_dlunknown; ++ ++ if (self) ++ { ++ if (dlsym (self,"fnord")) status = $lt_dlno_uscore; ++ else ++ { ++ if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; ++ else puts (dlerror ()); ++ } ++ /* dlclose (self); */ ++ } ++ else ++ puts (dlerror ()); ++ ++ return status; ++} ++_LT_EOF ++ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 ++ (eval $ac_link) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then ++ (./conftest; exit; ) >&5 2>/dev/null ++ lt_status=$? ++ case x$lt_status in ++ x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; ++ x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; ++ x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;; ++ esac ++ else : ++ # compilation failed ++ lt_cv_dlopen_self_static=no ++ fi ++fi ++rm -fr conftest* ++ ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5 ++$as_echo "$lt_cv_dlopen_self_static" >&6; } ++ fi ++ ++ CPPFLAGS="$save_CPPFLAGS" ++ LDFLAGS="$save_LDFLAGS" ++ LIBS="$save_LIBS" ++ ;; ++ esac ++ ++ case $lt_cv_dlopen_self in ++ yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; ++ *) enable_dlopen_self=unknown ;; ++ esac ++ ++ case $lt_cv_dlopen_self_static in ++ yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; ++ *) enable_dlopen_self_static=unknown ;; ++ esac ++fi ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++striplib= ++old_striplib= ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5 ++$as_echo_n "checking whether stripping libraries is possible... 
" >&6; } ++if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then ++ test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" ++ test -z "$striplib" && striplib="$STRIP --strip-unneeded" ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++else ++# FIXME - insert some real tests, host_os isn't really good enough ++ case $host_os in ++ darwin*) ++ if test -n "$STRIP" ; then ++ striplib="$STRIP -x" ++ old_striplib="$STRIP -S" ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 ++$as_echo "yes" >&6; } ++ else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++ fi ++ ;; ++ *) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++ ;; ++ esac ++fi ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ # Report which library types will actually be built ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 ++$as_echo_n "checking if libtool supports shared libraries... " >&6; } ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 ++$as_echo "$can_build_shared" >&6; } ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 ++$as_echo_n "checking whether to build shared libraries... " >&6; } ++ test "$can_build_shared" = "no" && enable_shared=no ++ ++ # On AIX, shared libraries and static libraries use the same namespace, and ++ # are all built from PIC. 
++ case $host_os in ++ aix3*) ++ test "$enable_shared" = yes && enable_static=no ++ if test -n "$RANLIB"; then ++ archive_cmds="$archive_cmds~\$RANLIB \$lib" ++ postinstall_cmds='$RANLIB $lib' ++ fi ++ ;; ++ ++ aix[4-9]*) ++ if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then ++ test "$enable_shared" = yes && enable_static=no ++ fi ++ ;; ++ esac ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 ++$as_echo "$enable_shared" >&6; } ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 ++$as_echo_n "checking whether to build static libraries... " >&6; } ++ # Make sure either enable_shared or enable_static is yes. ++ test "$enable_shared" = yes || enable_static=yes ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 ++$as_echo "$enable_static" >&6; } ++ ++ ++ ++ ++fi ++ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++ ++CC="$lt_save_CC" ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ac_config_commands="$ac_config_commands libtool" ++ ++ ++ ++ ++# Only expand once: ++ ++ ++ ++ ++backtrace_supported=yes ++ ++if test -n "${with_target_subdir}"; then ++ # We are compiling a GCC library. We can assume that the unwind ++ # library exists. 
++ BACKTRACE_FILE="backtrace.lo simple.lo" ++else ++ ac_fn_c_check_header_mongrel "$LINENO" "unwind.h" "ac_cv_header_unwind_h" "$ac_includes_default" ++if test "x$ac_cv_header_unwind_h" = x""yes; then : ++ ac_fn_c_check_func "$LINENO" "_Unwind_Backtrace" "ac_cv_func__Unwind_Backtrace" ++if test "x$ac_cv_func__Unwind_Backtrace" = x""yes; then : ++ BACKTRACE_FILE="backtrace.lo simple.lo" ++else ++ BACKTRACE_FILE="nounwind.lo" ++ backtrace_supported=no ++fi ++ ++else ++ BACKTRACE_FILE="nounwind.lo" ++ backtrace_supported=no ++fi ++ ++ ++fi ++ ++ ++EXTRA_FLAGS= ++if test -n "${with_target_subdir}"; then ++ EXTRA_FLAGS="-funwind-tables -frandom-seed=\$@" ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -funwind-tables option" >&5 ++$as_echo_n "checking for -funwind-tables option... " >&6; } ++if test "${libbacktrace_cv_c_unwind_tables+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ CFLAGS_hold="$CFLAGS" ++ CFLAGS="$CFLAGS -funwind-tables" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++static int f() { return 0; } ++int ++main () ++{ ++return f(); ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ libbacktrace_cv_c_unwind_tables=yes ++else ++ libbacktrace_cv_c_unwind_tables=no ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ CFLAGS="$CFLAGS_hold" ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_c_unwind_tables" >&5 ++$as_echo "$libbacktrace_cv_c_unwind_tables" >&6; } ++ if test "$libbacktrace_cv_c_unwind_tables" = "yes"; then ++ EXTRA_FLAGS=-funwind-tables ++ fi ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -frandom-seed=string option" >&5 ++$as_echo_n "checking for -frandom-seed=string option... 
" >&6; } ++if test "${libbacktrace_cv_c_random_seed_string+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ CFLAGS_hold="$CFLAGS" ++ CFLAGS="$CFLAGS -frandom-seed=conftest.lo" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++return 0; ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ libbacktrace_cv_c_random_seed_string=yes ++else ++ libbacktrace_cv_c_random_seed_string=no ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ CFLAGS="$CFLAGS_hold" ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_c_random_seed_string" >&5 ++$as_echo "$libbacktrace_cv_c_random_seed_string" >&6; } ++ if test "$libbacktrace_cv_c_random_seed_string" = "yes"; then ++ EXTRA_FLAGS="$EXTRA_FLAGS -frandom-seed=\$@" ++ fi ++fi ++ ++ ++ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++ ++WARN_FLAGS= ++save_CFLAGS="$CFLAGS" ++for real_option in -W -Wall -Wwrite-strings -Wstrict-prototypes \ ++ -Wmissing-prototypes -Wold-style-definition \ ++ -Wmissing-format-attribute -Wcast-qual; do ++ # Do the check with the no- prefix removed since gcc silently ++ # accepts any -Wno-* option on purpose ++ case $real_option in ++ -Wno-*) option=-W`expr x$real_option : 'x-Wno-\(.*\)'` ;; ++ *) option=$real_option ;; ++ esac ++ as_acx_Woption=`$as_echo "acx_cv_prog_cc_warning_$option" | $as_tr_sh` ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC supports $option" >&5 ++$as_echo_n "checking whether $CC supports $option... " >&6; } ++if { as_var=$as_acx_Woption; eval "test \"\${$as_var+set}\" = set"; }; then : ++ $as_echo_n "(cached) " >&6 ++else ++ CFLAGS="$option" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ eval "$as_acx_Woption=yes" ++else ++ eval "$as_acx_Woption=no" ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ ++fi ++eval ac_res=\$$as_acx_Woption ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 ++$as_echo "$ac_res" >&6; } ++ if test `eval 'as_val=${'$as_acx_Woption'};$as_echo "$as_val"'` = yes; then : ++ WARN_FLAGS="$WARN_FLAGS${WARN_FLAGS:+ }$real_option" ++fi ++ done ++CFLAGS="$save_CFLAGS" ++ac_ext=c ++ac_cpp='$CPP $CPPFLAGS' ++ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ++ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ++ac_compiler_gnu=$ac_cv_c_compiler_gnu ++ ++ ++ ++if test -n "${with_target_subdir}"; then ++ WARN_FLAGS="$WARN_FLAGS -Werror" ++fi ++ ++ ++ ++if test -n "${with_target_subdir}"; then ++ ++ ++# Check whether --with-system-libunwind was given. ++if test "${with_system_libunwind+set}" = set; then : ++ withval=$with_system_libunwind; ++fi ++ ++ # If system-libunwind was not specifically set, pick a default setting. ++ if test x$with_system_libunwind = x; then ++ case ${target} in ++ ia64-*-hpux*) with_system_libunwind=yes ;; ++ *) with_system_libunwind=no ;; ++ esac ++ fi ++ # Based on system-libunwind and target, do we have ipinfo? ++ if test x$with_system_libunwind = xyes; then ++ case ${target} in ++ ia64-*-*) have_unwind_getipinfo=no ;; ++ *) have_unwind_getipinfo=yes ;; ++ esac ++ else ++ # Darwin before version 9 does not have _Unwind_GetIPInfo. 
++ ++ case ${target} in ++ *-*-darwin[3-8]|*-*-darwin[3-8].*) have_unwind_getipinfo=no ;; ++ *) have_unwind_getipinfo=yes ;; ++ esac ++ ++ fi ++ ++ if test x$have_unwind_getipinfo = xyes; then ++ ++$as_echo "#define HAVE_GETIPINFO 1" >>confdefs.h ++ ++ fi ++ ++else ++ ac_save_CFFLAGS="$CFLAGS" ++ CFLAGS="$CFLAGS -Werror-implicit-function-declaration" ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _Unwind_GetIPInfo" >&5 ++$as_echo_n "checking for _Unwind_GetIPInfo... " >&6; } ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++#include "unwind.h" ++ struct _Unwind_Context *context; ++ int ip_before_insn = 0; ++int ++main () ++{ ++return _Unwind_GetIPInfo (context, &ip_before_insn); ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ have_unwind_getipinfo=yes ++else ++ have_unwind_getipinfo=no ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++ CFLAGS="$ac_save_CFLAGS" ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $have_unwind_getipinfo" >&5 ++$as_echo "$have_unwind_getipinfo" >&6; } ++ if test "$have_unwind_getipinfo" = "yes"; then ++ ++$as_echo "#define HAVE_GETIPINFO 1" >>confdefs.h ++ ++ fi ++fi ++ ++# Enable --enable-host-shared. ++# Check whether --enable-host-shared was given. ++if test "${enable_host_shared+set}" = set; then : ++ enableval=$enable_host_shared; PIC_FLAG=-fPIC ++else ++ PIC_FLAG= ++fi ++ ++ ++ ++# Test for __sync support. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking __sync extensions" >&5 ++$as_echo_n "checking __sync extensions... " >&6; } ++if test "${libbacktrace_cv_sys_sync+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "${with_target_subdir}"; then ++ case "${host}" in ++ hppa*-*-hpux*) libbacktrace_cv_sys_sync=no ;; ++ *) libbacktrace_cv_sys_sync=yes ;; ++ esac ++ else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++int i; ++int ++main () ++{ ++__sync_bool_compare_and_swap (&i, i, i); ++ __sync_lock_test_and_set (&i, 1); ++ __sync_lock_release (&i); ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ libbacktrace_cv_sys_sync=yes ++else ++ libbacktrace_cv_sys_sync=no ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++ fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_sys_sync" >&5 ++$as_echo "$libbacktrace_cv_sys_sync" >&6; } ++BACKTRACE_SUPPORTS_THREADS=0 ++if test "$libbacktrace_cv_sys_sync" = "yes"; then ++ BACKTRACE_SUPPORTS_THREADS=1 ++ ++$as_echo "#define HAVE_SYNC_FUNCTIONS 1" >>confdefs.h ++ ++fi ++ ++ ++# Test for __atomic support. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking __atomic extensions" >&5 ++$as_echo_n "checking __atomic extensions... " >&6; } ++if test "${libbacktrace_cv_sys_atomic+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "${with_target_subdir}"; then ++ libbacktrace_cv_sys_atomic=yes ++ else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++int i; ++int ++main () ++{ ++__atomic_load_n (&i, __ATOMIC_ACQUIRE); ++ __atomic_store_n (&i, 1, __ATOMIC_RELEASE); ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_link "$LINENO"; then : ++ libbacktrace_cv_sys_atomic=yes ++else ++ libbacktrace_cv_sys_atomic=no ++fi ++rm -f core conftest.err conftest.$ac_objext \ ++ conftest$ac_exeext conftest.$ac_ext ++ fi ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_sys_atomic" >&5 ++$as_echo "$libbacktrace_cv_sys_atomic" >&6; } ++if test "$libbacktrace_cv_sys_atomic" = "yes"; then ++ ++$as_echo "#define HAVE_ATOMIC_FUNCTIONS 1" >>confdefs.h ++ ++fi ++ ++# The library needs to be able to read the executable itself. Compile ++# a file to determine the executable format. The awk script ++# filetype.awk prints out the file type. 
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking output filetype" >&5 ++$as_echo_n "checking output filetype... " >&6; } ++if test "${libbacktrace_cv_sys_filetype+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ filetype= ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++int i; ++int ++main () ++{ ++int j; ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ filetype=`${AWK} -f $srcdir/filetype.awk conftest.$ac_objext` ++else ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++as_fn_error "compiler failed ++See \`config.log' for more details." "$LINENO" 5; } ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++libbacktrace_cv_sys_filetype=$filetype ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_sys_filetype" >&5 ++$as_echo "$libbacktrace_cv_sys_filetype" >&6; } ++ ++# Match the file type to decide what files to compile. ++FORMAT_FILE= ++backtrace_supports_data=yes ++case "$libbacktrace_cv_sys_filetype" in ++elf*) FORMAT_FILE="elf.lo" ;; ++pecoff) FORMAT_FILE="pecoff.lo" ++ backtrace_supports_data=no ++ ;; ++*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: could not determine output file type" >&5 ++$as_echo "$as_me: WARNING: could not determine output file type" >&2;} ++ FORMAT_FILE="unknown.lo" ++ backtrace_supported=no ++ ;; ++esac ++ ++ ++# ELF defines. 
++elfsize= ++case "$libbacktrace_cv_sys_filetype" in ++elf32) elfsize=32 ;; ++elf64) elfsize=64 ;; ++*) elfsize=unused ++esac ++ ++cat >>confdefs.h <<_ACEOF ++#define BACKTRACE_ELF_SIZE $elfsize ++_ACEOF ++ ++ ++BACKTRACE_SUPPORTED=0 ++if test "$backtrace_supported" = "yes"; then ++ BACKTRACE_SUPPORTED=1 ++fi ++ ++ ++BACKTRACE_SUPPORTS_DATA=0 ++if test "$backtrace_supports_data" = "yes"; then ++ BACKTRACE_SUPPORTS_DATA=1 ++fi ++ ++ ++ ++ ++inttype_headers=`echo inttypes.h sys/inttypes.h | sed -e 's/,/ /g'` ++ ++acx_cv_header_stdint=stddef.h ++acx_cv_header_stdint_kind="(already complete)" ++for i in stdint.h $inttype_headers; do ++ unset ac_cv_type_uintptr_t ++ unset ac_cv_type_uintmax_t ++ unset ac_cv_type_int_least32_t ++ unset ac_cv_type_int_fast32_t ++ unset ac_cv_type_uint64_t ++ $as_echo_n "looking for a compliant stdint.h in $i, " >&6 ++ ac_fn_c_check_type "$LINENO" "uintmax_t" "ac_cv_type_uintmax_t" "#include ++#include <$i> ++" ++if test "x$ac_cv_type_uintmax_t" = x""yes; then : ++ acx_cv_header_stdint=$i ++else ++ continue ++fi ++ ++ ac_fn_c_check_type "$LINENO" "uintptr_t" "ac_cv_type_uintptr_t" "#include ++#include <$i> ++" ++if test "x$ac_cv_type_uintptr_t" = x""yes; then : ++ ++else ++ acx_cv_header_stdint_kind="(mostly complete)" ++fi ++ ++ ac_fn_c_check_type "$LINENO" "int_least32_t" "ac_cv_type_int_least32_t" "#include ++#include <$i> ++" ++if test "x$ac_cv_type_int_least32_t" = x""yes; then : ++ ++else ++ acx_cv_header_stdint_kind="(mostly complete)" ++fi ++ ++ ac_fn_c_check_type "$LINENO" "int_fast32_t" "ac_cv_type_int_fast32_t" "#include ++#include <$i> ++" ++if test "x$ac_cv_type_int_fast32_t" = x""yes; then : ++ ++else ++ acx_cv_header_stdint_kind="(mostly complete)" ++fi ++ ++ ac_fn_c_check_type "$LINENO" "uint64_t" "ac_cv_type_uint64_t" "#include ++#include <$i> ++" ++if test "x$ac_cv_type_uint64_t" = x""yes; then : ++ ++else ++ acx_cv_header_stdint_kind="(lacks uint64_t)" ++fi ++ ++ break ++done ++if test "$acx_cv_header_stdint" = stddef.h; 
then ++ acx_cv_header_stdint_kind="(lacks uintmax_t)" ++ for i in stdint.h $inttype_headers; do ++ unset ac_cv_type_uintptr_t ++ unset ac_cv_type_uint32_t ++ unset ac_cv_type_uint64_t ++ $as_echo_n "looking for an incomplete stdint.h in $i, " >&6 ++ ac_fn_c_check_type "$LINENO" "uint32_t" "ac_cv_type_uint32_t" "#include ++#include <$i> ++" ++if test "x$ac_cv_type_uint32_t" = x""yes; then : ++ acx_cv_header_stdint=$i ++else ++ continue ++fi ++ ++ ac_fn_c_check_type "$LINENO" "uint64_t" "ac_cv_type_uint64_t" "#include ++#include <$i> ++" ++if test "x$ac_cv_type_uint64_t" = x""yes; then : ++ ++fi ++ ++ ac_fn_c_check_type "$LINENO" "uintptr_t" "ac_cv_type_uintptr_t" "#include ++#include <$i> ++" ++if test "x$ac_cv_type_uintptr_t" = x""yes; then : ++ ++fi ++ ++ break ++ done ++fi ++if test "$acx_cv_header_stdint" = stddef.h; then ++ acx_cv_header_stdint_kind="(u_intXX_t style)" ++ for i in sys/types.h $inttype_headers; do ++ unset ac_cv_type_u_int32_t ++ unset ac_cv_type_u_int64_t ++ $as_echo_n "looking for u_intXX_t types in $i, " >&6 ++ ac_fn_c_check_type "$LINENO" "u_int32_t" "ac_cv_type_u_int32_t" "#include ++#include <$i> ++" ++if test "x$ac_cv_type_u_int32_t" = x""yes; then : ++ acx_cv_header_stdint=$i ++else ++ continue ++fi ++ ++ ac_fn_c_check_type "$LINENO" "u_int64_t" "ac_cv_type_u_int64_t" "#include ++#include <$i> ++" ++if test "x$ac_cv_type_u_int64_t" = x""yes; then : ++ ++fi ++ ++ break ++ done ++fi ++if test "$acx_cv_header_stdint" = stddef.h; then ++ acx_cv_header_stdint_kind="(using manual detection)" ++fi ++ ++test -z "$ac_cv_type_uintptr_t" && ac_cv_type_uintptr_t=no ++test -z "$ac_cv_type_uint64_t" && ac_cv_type_uint64_t=no ++test -z "$ac_cv_type_u_int64_t" && ac_cv_type_u_int64_t=no ++test -z "$ac_cv_type_int_least32_t" && ac_cv_type_int_least32_t=no ++test -z "$ac_cv_type_int_fast32_t" && ac_cv_type_int_fast32_t=no ++ ++# ----------------- Summarize what we found so far ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking what to include in 
gstdint.h" >&5 ++$as_echo_n "checking what to include in gstdint.h... " >&6; } ++ ++case `$as_basename -- gstdint.h || ++$as_expr X/gstdint.h : '.*/\([^/][^/]*\)/*$' \| \ ++ Xgstdint.h : 'X\(//\)$' \| \ ++ Xgstdint.h : 'X\(/\)' \| . 2>/dev/null || ++$as_echo X/gstdint.h | ++ sed '/^.*\/\([^/][^/]*\)\/*$/{ ++ s//\1/ ++ q ++ } ++ /^X\/\(\/\/\)$/{ ++ s//\1/ ++ q ++ } ++ /^X\/\(\/\).*/{ ++ s//\1/ ++ q ++ } ++ s/.*/./; q'` in ++ stdint.h) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: are you sure you want it there?" >&5 ++$as_echo "$as_me: WARNING: are you sure you want it there?" >&2;} ;; ++ inttypes.h) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: are you sure you want it there?" >&5 ++$as_echo "$as_me: WARNING: are you sure you want it there?" >&2;} ;; ++ *) ;; ++esac ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $acx_cv_header_stdint $acx_cv_header_stdint_kind" >&5 ++$as_echo "$acx_cv_header_stdint $acx_cv_header_stdint_kind" >&6; } ++ ++# ----------------- done included file, check C basic types -------- ++ ++# Lacking an uintptr_t? Test size of void * ++case "$acx_cv_header_stdint:$ac_cv_type_uintptr_t" in ++ stddef.h:* | *:no) # The cast to long int works around a bug in the HP C Compiler ++# version HP92453-01 B.11.11.23709.GP, which incorrectly rejects ++# declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. ++# This bug is HP SR number 8606223364. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking size of void *" >&5 ++$as_echo_n "checking size of void *... 
" >&6; } ++if test "${ac_cv_sizeof_void_p+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if ac_fn_c_compute_int "$LINENO" "(long int) (sizeof (void *))" "ac_cv_sizeof_void_p" "$ac_includes_default"; then : ++ ++else ++ if test "$ac_cv_type_void_p" = yes; then ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++{ as_fn_set_status 77 ++as_fn_error "cannot compute sizeof (void *) ++See \`config.log' for more details." "$LINENO" 5; }; } ++ else ++ ac_cv_sizeof_void_p=0 ++ fi ++fi ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_void_p" >&5 ++$as_echo "$ac_cv_sizeof_void_p" >&6; } ++ ++ ++ ++cat >>confdefs.h <<_ACEOF ++#define SIZEOF_VOID_P $ac_cv_sizeof_void_p ++_ACEOF ++ ++ ;; ++esac ++ ++# Lacking an uint64_t? Test size of long ++case "$acx_cv_header_stdint:$ac_cv_type_uint64_t:$ac_cv_type_u_int64_t" in ++ stddef.h:*:* | *:no:no) # The cast to long int works around a bug in the HP C Compiler ++# version HP92453-01 B.11.11.23709.GP, which incorrectly rejects ++# declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. ++# This bug is HP SR number 8606223364. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking size of long" >&5 ++$as_echo_n "checking size of long... " >&6; } ++if test "${ac_cv_sizeof_long+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if ac_fn_c_compute_int "$LINENO" "(long int) (sizeof (long))" "ac_cv_sizeof_long" "$ac_includes_default"; then : ++ ++else ++ if test "$ac_cv_type_long" = yes; then ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++{ as_fn_set_status 77 ++as_fn_error "cannot compute sizeof (long) ++See \`config.log' for more details." 
"$LINENO" 5; }; } ++ else ++ ac_cv_sizeof_long=0 ++ fi ++fi ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_long" >&5 ++$as_echo "$ac_cv_sizeof_long" >&6; } ++ ++ ++ ++cat >>confdefs.h <<_ACEOF ++#define SIZEOF_LONG $ac_cv_sizeof_long ++_ACEOF ++ ++ ;; ++esac ++ ++if test $acx_cv_header_stdint = stddef.h; then ++ # Lacking a good header? Test size of everything and deduce all types. ++ # The cast to long int works around a bug in the HP C Compiler ++# version HP92453-01 B.11.11.23709.GP, which incorrectly rejects ++# declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. ++# This bug is HP SR number 8606223364. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking size of int" >&5 ++$as_echo_n "checking size of int... " >&6; } ++if test "${ac_cv_sizeof_int+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if ac_fn_c_compute_int "$LINENO" "(long int) (sizeof (int))" "ac_cv_sizeof_int" "$ac_includes_default"; then : ++ ++else ++ if test "$ac_cv_type_int" = yes; then ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++{ as_fn_set_status 77 ++as_fn_error "cannot compute sizeof (int) ++See \`config.log' for more details." "$LINENO" 5; }; } ++ else ++ ac_cv_sizeof_int=0 ++ fi ++fi ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_int" >&5 ++$as_echo "$ac_cv_sizeof_int" >&6; } ++ ++ ++ ++cat >>confdefs.h <<_ACEOF ++#define SIZEOF_INT $ac_cv_sizeof_int ++_ACEOF ++ ++ ++ # The cast to long int works around a bug in the HP C Compiler ++# version HP92453-01 B.11.11.23709.GP, which incorrectly rejects ++# declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. ++# This bug is HP SR number 8606223364. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking size of short" >&5 ++$as_echo_n "checking size of short... 
" >&6; } ++if test "${ac_cv_sizeof_short+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if ac_fn_c_compute_int "$LINENO" "(long int) (sizeof (short))" "ac_cv_sizeof_short" "$ac_includes_default"; then : ++ ++else ++ if test "$ac_cv_type_short" = yes; then ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++{ as_fn_set_status 77 ++as_fn_error "cannot compute sizeof (short) ++See \`config.log' for more details." "$LINENO" 5; }; } ++ else ++ ac_cv_sizeof_short=0 ++ fi ++fi ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_short" >&5 ++$as_echo "$ac_cv_sizeof_short" >&6; } ++ ++ ++ ++cat >>confdefs.h <<_ACEOF ++#define SIZEOF_SHORT $ac_cv_sizeof_short ++_ACEOF ++ ++ ++ # The cast to long int works around a bug in the HP C Compiler ++# version HP92453-01 B.11.11.23709.GP, which incorrectly rejects ++# declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. ++# This bug is HP SR number 8606223364. ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking size of char" >&5 ++$as_echo_n "checking size of char... " >&6; } ++if test "${ac_cv_sizeof_char+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if ac_fn_c_compute_int "$LINENO" "(long int) (sizeof (char))" "ac_cv_sizeof_char" "$ac_includes_default"; then : ++ ++else ++ if test "$ac_cv_type_char" = yes; then ++ { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 ++$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} ++{ as_fn_set_status 77 ++as_fn_error "cannot compute sizeof (char) ++See \`config.log' for more details." 
"$LINENO" 5; }; } ++ else ++ ac_cv_sizeof_char=0 ++ fi ++fi ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_char" >&5 ++$as_echo "$ac_cv_sizeof_char" >&6; } ++ ++ ++ ++cat >>confdefs.h <<_ACEOF ++#define SIZEOF_CHAR $ac_cv_sizeof_char ++_ACEOF ++ ++ ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for type equivalent to int8_t" >&5 ++$as_echo_n "checking for type equivalent to int8_t... " >&6; } ++ case "$ac_cv_sizeof_char" in ++ 1) acx_cv_type_int8_t=char ;; ++ *) as_fn_error "no 8-bit type, please report a bug" "$LINENO" 5 ++ esac ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $acx_cv_type_int8_t" >&5 ++$as_echo "$acx_cv_type_int8_t" >&6; } ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for type equivalent to int16_t" >&5 ++$as_echo_n "checking for type equivalent to int16_t... " >&6; } ++ case "$ac_cv_sizeof_int:$ac_cv_sizeof_short" in ++ 2:*) acx_cv_type_int16_t=int ;; ++ *:2) acx_cv_type_int16_t=short ;; ++ *) as_fn_error "no 16-bit type, please report a bug" "$LINENO" 5 ++ esac ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $acx_cv_type_int16_t" >&5 ++$as_echo "$acx_cv_type_int16_t" >&6; } ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for type equivalent to int32_t" >&5 ++$as_echo_n "checking for type equivalent to int32_t... 
" >&6; } ++ case "$ac_cv_sizeof_int:$ac_cv_sizeof_long" in ++ 4:*) acx_cv_type_int32_t=int ;; ++ *:4) acx_cv_type_int32_t=long ;; ++ *) as_fn_error "no 32-bit type, please report a bug" "$LINENO" 5 ++ esac ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $acx_cv_type_int32_t" >&5 ++$as_echo "$acx_cv_type_int32_t" >&6; } ++fi ++ ++# These tests are here to make the output prettier ++ ++if test "$ac_cv_type_uint64_t" != yes && test "$ac_cv_type_u_int64_t" != yes; then ++ case "$ac_cv_sizeof_long" in ++ 8) acx_cv_type_int64_t=long ;; ++ esac ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for type equivalent to int64_t" >&5 ++$as_echo_n "checking for type equivalent to int64_t... " >&6; } ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${acx_cv_type_int64_t-'using preprocessor symbols'}" >&5 ++$as_echo "${acx_cv_type_int64_t-'using preprocessor symbols'}" >&6; } ++fi ++ ++# Now we can use the above types ++ ++if test "$ac_cv_type_uintptr_t" != yes; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking for type equivalent to intptr_t" >&5 ++$as_echo_n "checking for type equivalent to intptr_t... 
" >&6; } ++ case $ac_cv_sizeof_void_p in ++ 2) acx_cv_type_intptr_t=int16_t ;; ++ 4) acx_cv_type_intptr_t=int32_t ;; ++ 8) acx_cv_type_intptr_t=int64_t ;; ++ *) as_fn_error "no equivalent for intptr_t, please report a bug" "$LINENO" 5 ++ esac ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $acx_cv_type_intptr_t" >&5 ++$as_echo "$acx_cv_type_intptr_t" >&6; } ++fi ++ ++# ----------------- done all checks, emit header ------------- ++ac_config_commands="$ac_config_commands gstdint.h" ++ ++ ++ ++ ++for ac_header in sys/mman.h ++do : ++ ac_fn_c_check_header_mongrel "$LINENO" "sys/mman.h" "ac_cv_header_sys_mman_h" "$ac_includes_default" ++if test "x$ac_cv_header_sys_mman_h" = x""yes; then : ++ cat >>confdefs.h <<_ACEOF ++#define HAVE_SYS_MMAN_H 1 ++_ACEOF ++ ++fi ++ ++done ++ ++if test "$ac_cv_header_sys_mman_h" = "no"; then ++ have_mmap=no ++else ++ if test -n "${with_target_subdir}"; then ++ # When built as a GCC target library, we can't do a link test. We ++ # simply assume that if we have mman.h, we have mmap. ++ have_mmap=yes ++ case "${host}" in ++ spu-*-*|*-*-msdosdjgpp) ++ # The SPU does not have mmap, but it has a sys/mman.h header file ++ # containing "mmap_eaddr" and the mmap flags, confusing the test. ++ # DJGPP also has sys/man.h, but no mmap ++ have_mmap=no ;; ++ esac ++ else ++ ac_fn_c_check_func "$LINENO" "mmap" "ac_cv_func_mmap" ++if test "x$ac_cv_func_mmap" = x""yes; then : ++ have_mmap=yes ++else ++ have_mmap=no ++fi ++ ++ fi ++fi ++if test "$have_mmap" = "no"; then ++ VIEW_FILE=read.lo ++ ALLOC_FILE=alloc.lo ++else ++ VIEW_FILE=mmapio.lo ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++ ++#include ++#if !defined(MAP_ANONYMOUS) && !defined(MAP_ANON) ++ #error no MAP_ANONYMOUS ++#endif ++ ++_ACEOF ++if ac_fn_c_try_cpp "$LINENO"; then : ++ ALLOC_FILE=mmap.lo ++else ++ ALLOC_FILE=alloc.lo ++fi ++rm -f conftest.err conftest.$ac_ext ++fi ++ ++ ++ ++BACKTRACE_USES_MALLOC=0 ++if test "$ALLOC_FILE" = "alloc.lo"; then ++ BACKTRACE_USES_MALLOC=1 ++fi ++ ++ ++# Check for dl_iterate_phdr. ++for ac_header in link.h ++do : ++ ac_fn_c_check_header_mongrel "$LINENO" "link.h" "ac_cv_header_link_h" "$ac_includes_default" ++if test "x$ac_cv_header_link_h" = x""yes; then : ++ cat >>confdefs.h <<_ACEOF ++#define HAVE_LINK_H 1 ++_ACEOF ++ ++fi ++ ++done ++ ++if test "$ac_cv_header_link_h" = "no"; then ++ have_dl_iterate_phdr=no ++else ++ if test -n "${with_target_subdir}"; then ++ # When built as a GCC target library, we can't do a link test. ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++#include ++ ++_ACEOF ++if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | ++ $EGREP "dl_iterate_phdr" >/dev/null 2>&1; then : ++ have_dl_iterate_phdr=yes ++else ++ have_dl_iterate_phdr=no ++fi ++rm -f conftest* ++ ++ case "${host}" in ++ *-*-solaris2.10*) ++ # Avoid dl_iterate_phdr on Solaris 10, where it is in the ++ # header file but is only in -ldl. ++ have_dl_iterate_phdr=no ;; ++ esac ++ else ++ ac_fn_c_check_func "$LINENO" "dl_iterate_phdr" "ac_cv_func_dl_iterate_phdr" ++if test "x$ac_cv_func_dl_iterate_phdr" = x""yes; then : ++ have_dl_iterate_phdr=yes ++else ++ have_dl_iterate_phdr=no ++fi ++ ++ fi ++fi ++if test "$have_dl_iterate_phdr" = "yes"; then ++ ++$as_echo "#define HAVE_DL_ITERATE_PHDR 1" >>confdefs.h ++ ++fi ++ ++# Check for the fcntl function. 
++if test -n "${with_target_subdir}"; then ++ case "${host}" in ++ *-*-mingw*) have_fcntl=no ;; ++ spu-*-*) have_fcntl=no ;; ++ *) have_fcntl=yes ;; ++ esac ++else ++ ac_fn_c_check_func "$LINENO" "fcntl" "ac_cv_func_fcntl" ++if test "x$ac_cv_func_fcntl" = x""yes; then : ++ have_fcntl=yes ++else ++ have_fcntl=no ++fi ++ ++fi ++if test "$have_fcntl" = "yes"; then ++ ++$as_echo "#define HAVE_FCNTL 1" >>confdefs.h ++ ++fi ++ ++ac_fn_c_check_decl "$LINENO" "strnlen" "ac_cv_have_decl_strnlen" "$ac_includes_default" ++if test "x$ac_cv_have_decl_strnlen" = x""yes; then : ++ ac_have_decl=1 ++else ++ ac_have_decl=0 ++fi ++ ++cat >>confdefs.h <<_ACEOF ++#define HAVE_DECL_STRNLEN $ac_have_decl ++_ACEOF ++ ++ ++# Check for getexecname function. ++if test -n "${with_target_subdir}"; then ++ case "${host}" in ++ *-*-solaris2*) have_getexecname=yes ;; ++ *) have_getexecname=no ;; ++ esac ++else ++ ac_fn_c_check_func "$LINENO" "getexecname" "ac_cv_func_getexecname" ++if test "x$ac_cv_func_getexecname" = x""yes; then : ++ have_getexecname=yes ++else ++ have_getexecname=no ++fi ++ ++fi ++if test "$have_getexecname" = "yes"; then ++ ++$as_echo "#define HAVE_GETEXECNAME 1" >>confdefs.h ++ ++fi ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether tests can run" >&5 ++$as_echo_n "checking whether tests can run... " >&6; } ++if test "${libbacktrace_cv_sys_native+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test "$cross_compiling" = yes; then : ++ libbacktrace_cv_sys_native=no ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++ ++int ++main () ++{ ++return 0; ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_run "$LINENO"; then : ++ libbacktrace_cv_sys_native=yes ++else ++ libbacktrace_cv_sys_native=no ++fi ++rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ ++ conftest.$ac_objext conftest.beam conftest.$ac_ext ++fi ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libbacktrace_cv_sys_native" >&5 ++$as_echo "$libbacktrace_cv_sys_native" >&6; } ++ if test "$libbacktrace_cv_sys_native" = "yes"; then ++ NATIVE_TRUE= ++ NATIVE_FALSE='#' ++else ++ NATIVE_TRUE='#' ++ NATIVE_FALSE= ++fi ++ ++ ++if test "${multilib}" = "yes"; then ++ multilib_arg="--enable-multilib" ++else ++ multilib_arg= ++fi ++ ++ac_config_files="$ac_config_files Makefile backtrace-supported.h" ++ ++ ++# We need multilib support, but only if configuring for the target. ++ac_config_commands="$ac_config_commands default" ++ ++ ++cat >confcache <<\_ACEOF ++# This file is a shell script that caches the results of configure ++# tests run on this system so they can be shared between configure ++# scripts and configure runs, see configure's option --config-cache. ++# It is not useful on other systems. If it contains results you don't ++# want to keep, you may remove or edit it. ++# ++# config.status only pays attention to the cache file if you give it ++# the --recheck option to rerun configure. ++# ++# `ac_cv_env_foo' variables (set or unset) will be overridden when ++# loading this file, other *unset* `ac_cv_foo' will be assigned the ++# following values. ++ ++_ACEOF ++ ++# The following way of writing the cache mishandles newlines in values, ++# but we know of no workaround that is simple, portable, and efficient. ++# So, we kill variables containing newlines. ++# Ultrix sh set writes to stderr and can't be redirected directly, ++# and sets the high bit in the cache file unless we assign to the vars. 
++( ++ for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do ++ eval ac_val=\$$ac_var ++ case $ac_val in #( ++ *${as_nl}*) ++ case $ac_var in #( ++ *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 ++$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; ++ esac ++ case $ac_var in #( ++ _ | IFS | as_nl) ;; #( ++ BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( ++ *) { eval $ac_var=; unset $ac_var;} ;; ++ esac ;; ++ esac ++ done ++ ++ (set) 2>&1 | ++ case $as_nl`(ac_space=' '; set) 2>&1` in #( ++ *${as_nl}ac_space=\ *) ++ # `set' does not quote correctly, so add quotes: double-quote ++ # substitution turns \\\\ into \\, and sed turns \\ into \. ++ sed -n \ ++ "s/'/'\\\\''/g; ++ s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" ++ ;; #( ++ *) ++ # `set' quotes correctly as required by POSIX, so do not add quotes. ++ sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ++ ;; ++ esac | ++ sort ++) | ++ sed ' ++ /^ac_cv_env_/b end ++ t clear ++ :clear ++ s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ ++ t end ++ s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ ++ :end' >>confcache ++if diff "$cache_file" confcache >/dev/null 2>&1; then :; else ++ if test -w "$cache_file"; then ++ test "x$cache_file" != "x/dev/null" && ++ { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 ++$as_echo "$as_me: updating cache $cache_file" >&6;} ++ cat confcache >$cache_file ++ else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 ++$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} ++ fi ++fi ++rm -f confcache ++ ++test "x$prefix" = xNONE && prefix=$ac_default_prefix ++# Let make expand exec_prefix. ++test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' ++ ++DEFS=-DHAVE_CONFIG_H ++ ++ac_libobjs= ++ac_ltlibobjs= ++for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue ++ # 1. 
Remove the extension, and $U if already installed. ++ ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' ++ ac_i=`$as_echo "$ac_i" | sed "$ac_script"` ++ # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR ++ # will be set to the directory where LIBOBJS objects are built. ++ as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" ++ as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' ++done ++LIBOBJS=$ac_libobjs ++ ++LTLIBOBJS=$ac_ltlibobjs ++ ++ ++ if test -n "$EXEEXT"; then ++ am__EXEEXT_TRUE= ++ am__EXEEXT_FALSE='#' ++else ++ am__EXEEXT_TRUE='#' ++ am__EXEEXT_FALSE= ++fi ++ ++if test -z "${MAINTAINER_MODE_TRUE}" && test -z "${MAINTAINER_MODE_FALSE}"; then ++ as_fn_error "conditional \"MAINTAINER_MODE\" was never defined. ++Usually this means the macro was only invoked conditionally." "$LINENO" 5 ++fi ++if test -z "${NATIVE_TRUE}" && test -z "${NATIVE_FALSE}"; then ++ as_fn_error "conditional \"NATIVE\" was never defined. ++Usually this means the macro was only invoked conditionally." "$LINENO" 5 ++fi ++ ++: ${CONFIG_STATUS=./config.status} ++ac_write_fail=0 ++ac_clean_files_save=$ac_clean_files ++ac_clean_files="$ac_clean_files $CONFIG_STATUS" ++{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 ++$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} ++as_write_fail=0 ++cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 ++#! $SHELL ++# Generated by $as_me. ++# Run this file to recreate the current configuration. ++# Compiler output produced by configure, useful for debugging ++# configure, is in config.log if it exists. ++ ++debug=false ++ac_cs_recheck=false ++ac_cs_silent=false ++ ++SHELL=\${CONFIG_SHELL-$SHELL} ++export SHELL ++_ASEOF ++cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 ++## -------------------- ## ++## M4sh Initialization. 
## ++## -------------------- ## ++ ++# Be more Bourne compatible ++DUALCASE=1; export DUALCASE # for MKS sh ++if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : ++ emulate sh ++ NULLCMD=: ++ # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which ++ # is contrary to our usage. Disable this feature. ++ alias -g '${1+"$@"}'='"$@"' ++ setopt NO_GLOB_SUBST ++else ++ case `(set -o) 2>/dev/null` in #( ++ *posix*) : ++ set -o posix ;; #( ++ *) : ++ ;; ++esac ++fi ++ ++ ++as_nl=' ++' ++export as_nl ++# Printing a long string crashes Solaris 7 /usr/bin/printf. ++as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ++as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo ++as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo ++# Prefer a ksh shell builtin over an external printf program on Solaris, ++# but without wasting forks for bash or zsh. ++if test -z "$BASH_VERSION$ZSH_VERSION" \ ++ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then ++ as_echo='print -r --' ++ as_echo_n='print -rn --' ++elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then ++ as_echo='printf %s\n' ++ as_echo_n='printf %s' ++else ++ if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then ++ as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' ++ as_echo_n='/usr/ucb/echo -n' ++ else ++ as_echo_body='eval expr "X$1" : "X\\(.*\\)"' ++ as_echo_n_body='eval ++ arg=$1; ++ case $arg in #( ++ *"$as_nl"*) ++ expr "X$arg" : "X\\(.*\\)$as_nl"; ++ arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; ++ esac; ++ expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ++ ' ++ export as_echo_n_body ++ as_echo_n='sh -c $as_echo_n_body as_echo' ++ fi ++ export as_echo_body ++ as_echo='sh -c $as_echo_body as_echo' ++fi ++ ++# The user is always right. 
++if test "${PATH_SEPARATOR+set}" != set; then ++ PATH_SEPARATOR=: ++ (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { ++ (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || ++ PATH_SEPARATOR=';' ++ } ++fi ++ ++ ++# IFS ++# We need space, tab and new line, in precisely that order. Quoting is ++# there to prevent editors from complaining about space-tab. ++# (If _AS_PATH_WALK were called with IFS unset, it would disable word ++# splitting by setting IFS to empty value.) ++IFS=" "" $as_nl" ++ ++# Find who we are. Look in the path if we contain no directory separator. ++case $0 in #(( ++ *[\\/]* ) as_myself=$0 ;; ++ *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break ++ done ++IFS=$as_save_IFS ++ ++ ;; ++esac ++# We did not find ourselves, most probably we were run as `sh COMMAND' ++# in which case we are not to be found in the path. ++if test "x$as_myself" = x; then ++ as_myself=$0 ++fi ++if test ! -f "$as_myself"; then ++ $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 ++ exit 1 ++fi ++ ++# Unset variables that we do not need and which cause bugs (e.g. in ++# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" ++# suppresses any "Segmentation fault" message there. '((' could ++# trigger a bug in pdksh 5.2.14. ++for as_var in BASH_ENV ENV MAIL MAILPATH ++do eval test x\${$as_var+set} = xset \ ++ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : ++done ++PS1='$ ' ++PS2='> ' ++PS4='+ ' ++ ++# NLS nuisances. ++LC_ALL=C ++export LC_ALL ++LANGUAGE=C ++export LANGUAGE ++ ++# CDPATH. ++(unset CDPATH) >/dev/null 2>&1 && unset CDPATH ++ ++ ++# as_fn_error ERROR [LINENO LOG_FD] ++# --------------------------------- ++# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are ++# provided, also output the error to LOG_FD, referencing LINENO. 
Then exit the ++# script with status $?, using 1 if that was 0. ++as_fn_error () ++{ ++ as_status=$?; test $as_status -eq 0 && as_status=1 ++ if test "$3"; then ++ as_lineno=${as_lineno-"$2"} as_lineno_stack=as_lineno_stack=$as_lineno_stack ++ $as_echo "$as_me:${as_lineno-$LINENO}: error: $1" >&$3 ++ fi ++ $as_echo "$as_me: error: $1" >&2 ++ as_fn_exit $as_status ++} # as_fn_error ++ ++ ++# as_fn_set_status STATUS ++# ----------------------- ++# Set $? to STATUS, without forking. ++as_fn_set_status () ++{ ++ return $1 ++} # as_fn_set_status ++ ++# as_fn_exit STATUS ++# ----------------- ++# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. ++as_fn_exit () ++{ ++ set +e ++ as_fn_set_status $1 ++ exit $1 ++} # as_fn_exit ++ ++# as_fn_unset VAR ++# --------------- ++# Portably unset VAR. ++as_fn_unset () ++{ ++ { eval $1=; unset $1;} ++} ++as_unset=as_fn_unset ++# as_fn_append VAR VALUE ++# ---------------------- ++# Append the text in VALUE to the end of the definition contained in VAR. Take ++# advantage of any shell optimizations that allow amortized linear growth over ++# repeated appends, instead of the typical quadratic growth present in naive ++# implementations. ++if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : ++ eval 'as_fn_append () ++ { ++ eval $1+=\$2 ++ }' ++else ++ as_fn_append () ++ { ++ eval $1=\$$1\$2 ++ } ++fi # as_fn_append ++ ++# as_fn_arith ARG... ++# ------------------ ++# Perform arithmetic evaluation on the ARGs, and store the result in the ++# global $as_val. Take advantage of shells that can avoid forks. The arguments ++# must be portable across $(()) and expr. ++if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : ++ eval 'as_fn_arith () ++ { ++ as_val=$(( $* )) ++ }' ++else ++ as_fn_arith () ++ { ++ as_val=`expr "$@" || test $? 
-eq 1` ++ } ++fi # as_fn_arith ++ ++ ++if expr a : '\(a\)' >/dev/null 2>&1 && ++ test "X`expr 00001 : '.*\(...\)'`" = X001; then ++ as_expr=expr ++else ++ as_expr=false ++fi ++ ++if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then ++ as_basename=basename ++else ++ as_basename=false ++fi ++ ++if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then ++ as_dirname=dirname ++else ++ as_dirname=false ++fi ++ ++as_me=`$as_basename -- "$0" || ++$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ ++ X"$0" : 'X\(//\)$' \| \ ++ X"$0" : 'X\(/\)' \| . 2>/dev/null || ++$as_echo X/"$0" | ++ sed '/^.*\/\([^/][^/]*\)\/*$/{ ++ s//\1/ ++ q ++ } ++ /^X\/\(\/\/\)$/{ ++ s//\1/ ++ q ++ } ++ /^X\/\(\/\).*/{ ++ s//\1/ ++ q ++ } ++ s/.*/./; q'` ++ ++# Avoid depending upon Character Ranges. ++as_cr_letters='abcdefghijklmnopqrstuvwxyz' ++as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' ++as_cr_Letters=$as_cr_letters$as_cr_LETTERS ++as_cr_digits='0123456789' ++as_cr_alnum=$as_cr_Letters$as_cr_digits ++ ++ECHO_C= ECHO_N= ECHO_T= ++case `echo -n x` in #((((( ++-n*) ++ case `echo 'xy\c'` in ++ *c*) ECHO_T=' ';; # ECHO_T is single tab character. ++ xy) ECHO_C='\c';; ++ *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ++ ECHO_T=' ';; ++ esac;; ++*) ++ ECHO_N='-n';; ++esac ++ ++rm -f conf$$ conf$$.exe conf$$.file ++if test -d conf$$.dir; then ++ rm -f conf$$.dir/conf$$.file ++else ++ rm -f conf$$.dir ++ mkdir conf$$.dir 2>/dev/null ++fi ++if (echo >conf$$.file) 2>/dev/null; then ++ if ln -s conf$$.file conf$$ 2>/dev/null; then ++ as_ln_s='ln -s' ++ # ... but there are two gotchas: ++ # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. ++ # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. ++ # In both cases, we have to default to `cp -p'. ++ ln -s conf$$.file conf$$.dir 2>/dev/null && test ! 
-f conf$$.exe || ++ as_ln_s='cp -p' ++ elif ln conf$$.file conf$$ 2>/dev/null; then ++ as_ln_s=ln ++ else ++ as_ln_s='cp -p' ++ fi ++else ++ as_ln_s='cp -p' ++fi ++rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file ++rmdir conf$$.dir 2>/dev/null ++ ++ ++# as_fn_mkdir_p ++# ------------- ++# Create "$as_dir" as a directory, including parents if necessary. ++as_fn_mkdir_p () ++{ ++ ++ case $as_dir in #( ++ -*) as_dir=./$as_dir;; ++ esac ++ test -d "$as_dir" || eval $as_mkdir_p || { ++ as_dirs= ++ while :; do ++ case $as_dir in #( ++ *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( ++ *) as_qdir=$as_dir;; ++ esac ++ as_dirs="'$as_qdir' $as_dirs" ++ as_dir=`$as_dirname -- "$as_dir" || ++$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ ++ X"$as_dir" : 'X\(//\)[^/]' \| \ ++ X"$as_dir" : 'X\(//\)$' \| \ ++ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || ++$as_echo X"$as_dir" | ++ sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\/\)[^/].*/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\/\)$/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\).*/{ ++ s//\1/ ++ q ++ } ++ s/.*/./; q'` ++ test -d "$as_dir" && break ++ done ++ test -z "$as_dirs" || eval "mkdir $as_dirs" ++ } || test -d "$as_dir" || as_fn_error "cannot create directory $as_dir" ++ ++ ++} # as_fn_mkdir_p ++if mkdir -p . 2>/dev/null; then ++ as_mkdir_p='mkdir -p "$as_dir"' ++else ++ test -d ./-p && rmdir ./-p ++ as_mkdir_p=false ++fi ++ ++if test -x / >/dev/null 2>&1; then ++ as_test_x='test -x' ++else ++ if ls -dL / >/dev/null 2>&1; then ++ as_ls_L_option=L ++ else ++ as_ls_L_option= ++ fi ++ as_test_x=' ++ eval sh -c '\'' ++ if test -d "$1"; then ++ test -d "$1/."; ++ else ++ case $1 in #( ++ -*)set "./$1";; ++ esac; ++ case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( ++ ???[sx]*):;;*)false;;esac;fi ++ '\'' sh ++ ' ++fi ++as_executable_p=$as_test_x ++ ++# Sed expression to map a string onto a valid CPP name. 
++as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" ++ ++# Sed expression to map a string onto a valid variable name. ++as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" ++ ++ ++exec 6>&1 ++## ----------------------------------- ## ++## Main body of $CONFIG_STATUS script. ## ++## ----------------------------------- ## ++_ASEOF ++test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 ++ ++cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ++# Save the log message, to keep $0 and so on meaningful, and to ++# report actual input values of CONFIG_FILES etc. instead of their ++# values after options handling. ++ac_log=" ++This file was extended by package-unused $as_me version-unused, which was ++generated by GNU Autoconf 2.64. Invocation command line was ++ ++ CONFIG_FILES = $CONFIG_FILES ++ CONFIG_HEADERS = $CONFIG_HEADERS ++ CONFIG_LINKS = $CONFIG_LINKS ++ CONFIG_COMMANDS = $CONFIG_COMMANDS ++ $ $0 $@ ++ ++on `(hostname || uname -n) 2>/dev/null | sed 1q` ++" ++ ++_ACEOF ++ ++case $ac_config_files in *" ++"*) set x $ac_config_files; shift; ac_config_files=$*;; ++esac ++ ++case $ac_config_headers in *" ++"*) set x $ac_config_headers; shift; ac_config_headers=$*;; ++esac ++ ++ ++cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ++# Files that config.status was made for. ++config_files="$ac_config_files" ++config_headers="$ac_config_headers" ++config_commands="$ac_config_commands" ++ ++_ACEOF ++ ++cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ++ac_cs_usage="\ ++\`$as_me' instantiates files and other configuration actions ++from templates according to the current configuration. Unless the files ++and actions are specified as TAGs, all are instantiated by default. ++ ++Usage: $0 [OPTION]... [TAG]... 
++ ++ -h, --help print this help, then exit ++ -V, --version print version number and configuration settings, then exit ++ -q, --quiet, --silent ++ do not print progress messages ++ -d, --debug don't remove temporary files ++ --recheck update $as_me by reconfiguring in the same conditions ++ --file=FILE[:TEMPLATE] ++ instantiate the configuration file FILE ++ --header=FILE[:TEMPLATE] ++ instantiate the configuration header FILE ++ ++Configuration files: ++$config_files ++ ++Configuration headers: ++$config_headers ++ ++Configuration commands: ++$config_commands ++ ++Report bugs to the package provider." ++ ++_ACEOF ++cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ++ac_cs_version="\\ ++package-unused config.status version-unused ++configured by $0, generated by GNU Autoconf 2.64, ++ with options \\"`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`\\" ++ ++Copyright (C) 2009 Free Software Foundation, Inc. ++This config.status script is free software; the Free Software Foundation ++gives unlimited permission to copy, distribute and modify it." ++ ++ac_pwd='$ac_pwd' ++srcdir='$srcdir' ++INSTALL='$INSTALL' ++MKDIR_P='$MKDIR_P' ++AWK='$AWK' ++test -n "\$AWK" || AWK=awk ++_ACEOF ++ ++cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ++# The default lists apply if the user does not specify any file. ++ac_need_defaults=: ++while test $# != 0 ++do ++ case $1 in ++ --*=*) ++ ac_option=`expr "X$1" : 'X\([^=]*\)='` ++ ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` ++ ac_shift=: ++ ;; ++ *) ++ ac_option=$1 ++ ac_optarg=$2 ++ ac_shift=shift ++ ;; ++ esac ++ ++ case $ac_option in ++ # Handling of the options. 
++ -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ++ ac_cs_recheck=: ;; ++ --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) ++ $as_echo "$ac_cs_version"; exit ;; ++ --debug | --debu | --deb | --de | --d | -d ) ++ debug=: ;; ++ --file | --fil | --fi | --f ) ++ $ac_shift ++ case $ac_optarg in ++ *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; ++ esac ++ as_fn_append CONFIG_FILES " '$ac_optarg'" ++ ac_need_defaults=false;; ++ --header | --heade | --head | --hea ) ++ $ac_shift ++ case $ac_optarg in ++ *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; ++ esac ++ as_fn_append CONFIG_HEADERS " '$ac_optarg'" ++ ac_need_defaults=false;; ++ --he | --h) ++ # Conflict between --help and --header ++ as_fn_error "ambiguous option: \`$1' ++Try \`$0 --help' for more information.";; ++ --help | --hel | -h ) ++ $as_echo "$ac_cs_usage"; exit ;; ++ -q | -quiet | --quiet | --quie | --qui | --qu | --q \ ++ | -silent | --silent | --silen | --sile | --sil | --si | --s) ++ ac_cs_silent=: ;; ++ ++ # This is an error. ++ -*) as_fn_error "unrecognized option: \`$1' ++Try \`$0 --help' for more information." ;; ++ ++ *) as_fn_append ac_config_targets " $1" ++ ac_need_defaults=false ;; ++ ++ esac ++ shift ++done ++ ++ac_configure_extra_args= ++ ++if $ac_cs_silent; then ++ exec 6>/dev/null ++ ac_configure_extra_args="$ac_configure_extra_args --silent" ++fi ++ ++_ACEOF ++cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ++if \$ac_cs_recheck; then ++ set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion ++ shift ++ \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 ++ CONFIG_SHELL='$SHELL' ++ export CONFIG_SHELL ++ exec "\$@" ++fi ++ ++_ACEOF ++cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ++exec 5>>config.log ++{ ++ echo ++ sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ++## Running $as_me. 
## ++_ASBOX ++ $as_echo "$ac_log" ++} >&5 ++ ++_ACEOF ++cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ++# ++# INIT-COMMANDS ++# ++ ++srcdir="$srcdir" ++host="$host" ++target="$target" ++with_multisubdir="$with_multisubdir" ++with_multisrctop="$with_multisrctop" ++with_target_subdir="$with_target_subdir" ++ac_configure_args="${multilib_arg} ${ac_configure_args}" ++multi_basedir="$multi_basedir" ++CONFIG_SHELL=${CONFIG_SHELL-/bin/sh} ++CC="$CC" ++CXX="$CXX" ++GFORTRAN="$GFORTRAN" ++ ++ ++# The HP-UX ksh and POSIX shell print the target directory to stdout ++# if CDPATH is set. ++(unset CDPATH) >/dev/null 2>&1 && unset CDPATH ++ ++sed_quote_subst='$sed_quote_subst' ++double_quote_subst='$double_quote_subst' ++delay_variable_subst='$delay_variable_subst' ++macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`' ++macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`' ++enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`' ++enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`' ++pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`' ++enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`' ++SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`' ++ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`' ++host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`' ++host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`' ++host_os='`$ECHO "$host_os" | $SED "$delay_single_quote_subst"`' ++build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`' ++build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`' ++build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`' ++SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`' ++Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`' ++GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`' ++EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`' 
++FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`' ++LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`' ++NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`' ++LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`' ++max_cmd_len='`$ECHO "$max_cmd_len" | $SED "$delay_single_quote_subst"`' ++ac_objext='`$ECHO "$ac_objext" | $SED "$delay_single_quote_subst"`' ++exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' ++lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' ++lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' ++lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' ++reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' ++reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' ++OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' ++deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' ++file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' ++AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' ++AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' ++STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' ++RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' ++old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' ++old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`' ++old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`' ++lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`' ++CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`' ++CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`' ++compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`' ++GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`' ++lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED 
"$delay_single_quote_subst"`' ++lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' ++lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' ++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' ++objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' ++MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' ++lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' ++need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' ++DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' ++NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' ++LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' ++OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`' ++OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`' ++libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`' ++shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`' ++extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED "$delay_single_quote_subst"`' ++archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`' ++enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`' ++export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`' 
++whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`' ++compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`' ++old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`' ++old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`' ++archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`' ++archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED "$delay_single_quote_subst"`' ++module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`' ++module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`' ++with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`' ++allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED "$delay_single_quote_subst"`' ++no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`' ++hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`' ++hardcode_libdir_flag_spec_ld='`$ECHO "$hardcode_libdir_flag_spec_ld" | $SED "$delay_single_quote_subst"`' ++hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`' ++hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`' ++hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`' ++hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`' ++hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`' ++hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' ++inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' ++link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' ++fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' 
++always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' ++export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' ++exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' ++include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' ++prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' ++file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' ++variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' ++need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' ++need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`' ++version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`' ++runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`' ++shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`' ++shlibpath_overrides_runpath='`$ECHO "$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`' ++libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`' ++library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`' ++soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`' ++install_override_mode='`$ECHO "$install_override_mode" | $SED "$delay_single_quote_subst"`' ++postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`' ++postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED "$delay_single_quote_subst"`' ++finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`' ++finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`' ++hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`' ++sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`' ++sys_lib_dlsearch_path_spec='`$ECHO 
"$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`' ++hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`' ++enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`' ++enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`' ++enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`' ++old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`' ++striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`' ++ ++LTCC='$LTCC' ++LTCFLAGS='$LTCFLAGS' ++compiler='$compiler_DEFAULT' ++ ++# A function that is used when there is no print builtin or printf. ++func_fallback_echo () ++{ ++ eval 'cat <<_LTECHO_EOF ++\$1 ++_LTECHO_EOF' ++} ++ ++# Quote evaled strings. ++for var in SHELL \ ++ECHO \ ++SED \ ++GREP \ ++EGREP \ ++FGREP \ ++LD \ ++NM \ ++LN_S \ ++lt_SP2NL \ ++lt_NL2SP \ ++reload_flag \ ++OBJDUMP \ ++deplibs_check_method \ ++file_magic_cmd \ ++AR \ ++AR_FLAGS \ ++STRIP \ ++RANLIB \ ++CC \ ++CFLAGS \ ++compiler \ ++lt_cv_sys_global_symbol_pipe \ ++lt_cv_sys_global_symbol_to_cdecl \ ++lt_cv_sys_global_symbol_to_c_name_address \ ++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ ++lt_prog_compiler_no_builtin_flag \ ++lt_prog_compiler_wl \ ++lt_prog_compiler_pic \ ++lt_prog_compiler_static \ ++lt_cv_prog_compiler_c_o \ ++need_locks \ ++DSYMUTIL \ ++NMEDIT \ ++LIPO \ ++OTOOL \ ++OTOOL64 \ ++shrext_cmds \ ++export_dynamic_flag_spec \ ++whole_archive_flag_spec \ ++compiler_needs_object \ ++with_gnu_ld \ ++allow_undefined_flag \ ++no_undefined_flag \ ++hardcode_libdir_flag_spec \ ++hardcode_libdir_flag_spec_ld \ ++hardcode_libdir_separator \ ++fix_srcfile_path \ ++exclude_expsyms \ ++include_expsyms \ ++file_list_spec \ ++variables_saved_for_relink \ ++libname_spec \ ++library_names_spec \ ++soname_spec \ ++install_override_mode \ ++finish_eval \ ++old_striplib \ ++striplib; do ++ case \`eval \\\\\$ECHO 
\\\\""\\\\\$\$var"\\\\"\` in ++ *[\\\\\\\`\\"\\\$]*) ++ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ++ ;; ++ *) ++ eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ++ ;; ++ esac ++done ++ ++# Double-quote double-evaled strings. ++for var in reload_cmds \ ++old_postinstall_cmds \ ++old_postuninstall_cmds \ ++old_archive_cmds \ ++extract_expsyms_cmds \ ++old_archive_from_new_cmds \ ++old_archive_from_expsyms_cmds \ ++archive_cmds \ ++archive_expsym_cmds \ ++module_cmds \ ++module_expsym_cmds \ ++export_symbols_cmds \ ++prelink_cmds \ ++postinstall_cmds \ ++postuninstall_cmds \ ++finish_cmds \ ++sys_lib_search_path_spec \ ++sys_lib_dlsearch_path_spec; do ++ case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in ++ *[\\\\\\\`\\"\\\$]*) ++ eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ++ ;; ++ *) ++ eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ++ ;; ++ esac ++done ++ ++ac_aux_dir='$ac_aux_dir' ++xsi_shell='$xsi_shell' ++lt_shell_append='$lt_shell_append' ++ ++# See if we are running on zsh, and set the options which allow our ++# commands through without removal of \ escapes INIT. 
++if test -n "\${ZSH_VERSION+set}" ; then ++ setopt NO_GLOB_SUBST ++fi ++ ++ ++ PACKAGE='$PACKAGE' ++ VERSION='$VERSION' ++ TIMESTAMP='$TIMESTAMP' ++ RM='$RM' ++ ofile='$ofile' ++ ++ ++ ++ ++GCC="$GCC" ++CC="$CC" ++acx_cv_header_stdint="$acx_cv_header_stdint" ++acx_cv_type_int8_t="$acx_cv_type_int8_t" ++acx_cv_type_int16_t="$acx_cv_type_int16_t" ++acx_cv_type_int32_t="$acx_cv_type_int32_t" ++acx_cv_type_int64_t="$acx_cv_type_int64_t" ++acx_cv_type_intptr_t="$acx_cv_type_intptr_t" ++ac_cv_type_uintmax_t="$ac_cv_type_uintmax_t" ++ac_cv_type_uintptr_t="$ac_cv_type_uintptr_t" ++ac_cv_type_uint64_t="$ac_cv_type_uint64_t" ++ac_cv_type_u_int64_t="$ac_cv_type_u_int64_t" ++ac_cv_type_u_int32_t="$ac_cv_type_u_int32_t" ++ac_cv_type_int_least32_t="$ac_cv_type_int_least32_t" ++ac_cv_type_int_fast32_t="$ac_cv_type_int_fast32_t" ++ac_cv_sizeof_void_p="$ac_cv_sizeof_void_p" ++ ++ ++# Variables needed in config.status (file generation) which aren't already ++# passed by autoconf. ++SUBDIRS="$SUBDIRS" ++ ++ ++_ACEOF ++ ++cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ++ ++# Handling of arguments. ++for ac_config_target in $ac_config_targets ++do ++ case $ac_config_target in ++ "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; ++ "default-1") CONFIG_COMMANDS="$CONFIG_COMMANDS default-1" ;; ++ "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;; ++ "gstdint.h") CONFIG_COMMANDS="$CONFIG_COMMANDS gstdint.h" ;; ++ "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; ++ "backtrace-supported.h") CONFIG_FILES="$CONFIG_FILES backtrace-supported.h" ;; ++ "default") CONFIG_COMMANDS="$CONFIG_COMMANDS default" ;; ++ ++ *) as_fn_error "invalid argument: \`$ac_config_target'" "$LINENO" 5;; ++ esac ++done ++ ++ ++# If the user did not use the arguments to specify the items to instantiate, ++# then the envvar interface is used. Set only those that are not. ++# We use the long form for the default assignment because of an extremely ++# bizarre bug on SunOS 4.1.3. 
++if $ac_need_defaults; then ++ test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files ++ test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers ++ test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands ++fi ++ ++# Have a temporary directory for convenience. Make it in the build tree ++# simply because there is no reason against having it here, and in addition, ++# creating and moving files from /tmp can sometimes cause problems. ++# Hook for its removal unless debugging. ++# Note that there is a small window in which the directory will not be cleaned: ++# after its creation but before its name has been assigned to `$tmp'. ++$debug || ++{ ++ tmp= ++ trap 'exit_status=$? ++ { test -z "$tmp" || test ! -d "$tmp" || rm -fr "$tmp"; } && exit $exit_status ++' 0 ++ trap 'as_fn_exit 1' 1 2 13 15 ++} ++# Create a (secure) tmp directory for tmp files. ++ ++{ ++ tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && ++ test -n "$tmp" && test -d "$tmp" ++} || ++{ ++ tmp=./conf$$-$RANDOM ++ (umask 077 && mkdir "$tmp") ++} || as_fn_error "cannot create a temporary directory in ." "$LINENO" 5 ++ ++# Set up the scripts for CONFIG_FILES section. ++# No need to generate them if there are no CONFIG_FILES. ++# This happens for instance with `./config.status config.h'. ++if test -n "$CONFIG_FILES"; then ++ ++ ++ac_cr=`echo X | tr X '\015'` ++# On cygwin, bash can eat \r inside `` if the user requested igncr. ++# But we know of no other shell where ac_cr would be empty at this ++# point, so we can use a bashism as a fallback. 
++if test "x$ac_cr" = x; then ++ eval ac_cr=\$\'\\r\' ++fi ++ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` ++if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then ++ ac_cs_awk_cr='\r' ++else ++ ac_cs_awk_cr=$ac_cr ++fi ++ ++echo 'BEGIN {' >"$tmp/subs1.awk" && ++_ACEOF ++ ++ ++{ ++ echo "cat >conf$$subs.awk <<_ACEOF" && ++ echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && ++ echo "_ACEOF" ++} >conf$$subs.sh || ++ as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5 ++ac_delim_num=`echo "$ac_subst_vars" | grep -c '$'` ++ac_delim='%!_!# ' ++for ac_last_try in false false false false false :; do ++ . ./conf$$subs.sh || ++ as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5 ++ ++ ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` ++ if test $ac_delim_n = $ac_delim_num; then ++ break ++ elif $ac_last_try; then ++ as_fn_error "could not make $CONFIG_STATUS" "$LINENO" 5 ++ else ++ ac_delim="$ac_delim!$ac_delim _$ac_delim!! " ++ fi ++done ++rm -f conf$$subs.sh ++ ++cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ++cat >>"\$tmp/subs1.awk" <<\\_ACAWK && ++_ACEOF ++sed -n ' ++h ++s/^/S["/; s/!.*/"]=/ ++p ++g ++s/^[^!]*!// ++:repl ++t repl ++s/'"$ac_delim"'$// ++t delim ++:nl ++h ++s/\(.\{148\}\).*/\1/ ++t more1 ++s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ ++p ++n ++b repl ++:more1 ++s/["\\]/\\&/g; s/^/"/; s/$/"\\/ ++p ++g ++s/.\{148\}// ++t nl ++:delim ++h ++s/\(.\{148\}\).*/\1/ ++t more2 ++s/["\\]/\\&/g; s/^/"/; s/$/"/ ++p ++b ++:more2 ++s/["\\]/\\&/g; s/^/"/; s/$/"\\/ ++p ++g ++s/.\{148\}// ++t delim ++' >$CONFIG_STATUS || ac_write_fail=1 ++rm -f conf$$subs.awk ++cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ++_ACAWK ++cat >>"\$tmp/subs1.awk" <<_ACAWK && ++ for (key in S) S_is_set[key] = 1 ++ FS = "" ++ ++} ++{ ++ line = $ 0 ++ nfields = split(line, field, "@") ++ substed = 0 ++ len = length(field[1]) ++ for (i = 2; i < nfields; i++) { ++ key = field[i] ++ keylen = length(key) ++ if (S_is_set[key]) { ++ value = S[key] ++ line = substr(line, 1, len) "" 
value "" substr(line, len + keylen + 3) ++ len += length(value) + length(field[++i]) ++ substed = 1 ++ } else ++ len += 1 + keylen ++ } ++ ++ print line ++} ++ ++_ACAWK ++_ACEOF ++cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ++if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then ++ sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" ++else ++ cat ++fi < "$tmp/subs1.awk" > "$tmp/subs.awk" \ ++ || as_fn_error "could not setup config files machinery" "$LINENO" 5 ++_ACEOF ++ ++# VPATH may cause trouble with some makes, so we remove $(srcdir), ++# ${srcdir} and @srcdir@ from VPATH if srcdir is ".", strip leading and ++# trailing colons and then remove the whole line if VPATH becomes empty ++# (actually we leave an empty line to preserve line numbers). ++if test "x$srcdir" = x.; then ++ ac_vpsub='/^[ ]*VPATH[ ]*=/{ ++s/:*\$(srcdir):*/:/ ++s/:*\${srcdir}:*/:/ ++s/:*@srcdir@:*/:/ ++s/^\([^=]*=[ ]*\):*/\1/ ++s/:*$// ++s/^[^=]*=[ ]*$// ++}' ++fi ++ ++cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ++fi # test -n "$CONFIG_FILES" ++ ++# Set up the scripts for CONFIG_HEADERS section. ++# No need to generate them if there are no CONFIG_HEADERS. ++# This happens for instance with `./config.status Makefile'. ++if test -n "$CONFIG_HEADERS"; then ++cat >"$tmp/defines.awk" <<\_ACAWK || ++BEGIN { ++_ACEOF ++ ++# Transform confdefs.h into an awk script `defines.awk', embedded as ++# here-document in config.status, that substitutes the proper values into ++# config.h.in to produce config.h. ++ ++# Create a delimiter string that does not exist in confdefs.h, to ease ++# handling of long lines. ++ac_delim='%!_!# ' ++for ac_last_try in false false :; do ++ ac_t=`sed -n "/$ac_delim/p" confdefs.h` ++ if test -z "$ac_t"; then ++ break ++ elif $ac_last_try; then ++ as_fn_error "could not make $CONFIG_HEADERS" "$LINENO" 5 ++ else ++ ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" ++ fi ++done ++ ++# For the awk script, D is an array of macro values keyed by name, ++# likewise P contains macro parameters if any. Preserve backslash ++# newline sequences. ++ ++ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* ++sed -n ' ++s/.\{148\}/&'"$ac_delim"'/g ++t rset ++:rset ++s/^[ ]*#[ ]*define[ ][ ]*/ / ++t def ++d ++:def ++s/\\$// ++t bsnl ++s/["\\]/\\&/g ++s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ ++D["\1"]=" \3"/p ++s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p ++d ++:bsnl ++s/["\\]/\\&/g ++s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ ++D["\1"]=" \3\\\\\\n"\\/p ++t cont ++s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p ++t cont ++d ++:cont ++n ++s/.\{148\}/&'"$ac_delim"'/g ++t clear ++:clear ++s/\\$// ++t bsnlc ++s/["\\]/\\&/g; s/^/"/; s/$/"/p ++d ++:bsnlc ++s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p ++b cont ++' >$CONFIG_STATUS || ac_write_fail=1 ++ ++cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ++ for (key in D) D_is_set[key] = 1 ++ FS = "" ++} ++/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { ++ line = \$ 0 ++ split(line, arg, " ") ++ if (arg[1] == "#") { ++ defundef = arg[2] ++ mac1 = arg[3] ++ } else { ++ defundef = substr(arg[1], 2) ++ mac1 = arg[2] ++ } ++ split(mac1, mac2, "(") #) ++ macro = mac2[1] ++ prefix = substr(line, 1, index(line, defundef) - 1) ++ if (D_is_set[macro]) { ++ # Preserve the white space surrounding the "#". ++ print prefix "define", macro P[macro] D[macro] ++ next ++ } else { ++ # Replace #undef with comments. This is necessary, for example, ++ # in the case of _POSIX_SOURCE, which is predefined and required ++ # on some systems where configure will not decide to define it. 
++ if (defundef == "undef") { ++ print "/*", prefix defundef, macro, "*/" ++ next ++ } ++ } ++} ++{ print } ++_ACAWK ++_ACEOF ++cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ++ as_fn_error "could not setup config headers machinery" "$LINENO" 5 ++fi # test -n "$CONFIG_HEADERS" ++ ++ ++eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" ++shift ++for ac_tag ++do ++ case $ac_tag in ++ :[FHLC]) ac_mode=$ac_tag; continue;; ++ esac ++ case $ac_mode$ac_tag in ++ :[FHL]*:*);; ++ :L* | :C*:*) as_fn_error "invalid tag \`$ac_tag'" "$LINENO" 5;; ++ :[FH]-) ac_tag=-:-;; ++ :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; ++ esac ++ ac_save_IFS=$IFS ++ IFS=: ++ set x $ac_tag ++ IFS=$ac_save_IFS ++ shift ++ ac_file=$1 ++ shift ++ ++ case $ac_mode in ++ :L) ac_source=$1;; ++ :[FH]) ++ ac_file_inputs= ++ for ac_f ++ do ++ case $ac_f in ++ -) ac_f="$tmp/stdin";; ++ *) # Look for the file first in the build tree, then in the source tree ++ # (if the path is not absolute). The absolute path cannot be DOS-style, ++ # because $ac_f cannot contain `:'. ++ test -f "$ac_f" || ++ case $ac_f in ++ [\\/$]*) false;; ++ *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; ++ esac || ++ as_fn_error "cannot find input file: \`$ac_f'" "$LINENO" 5;; ++ esac ++ case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac ++ as_fn_append ac_file_inputs " '$ac_f'" ++ done ++ ++ # Let's still pretend it is `configure' which instantiates (i.e., don't ++ # use $as_me), people would be surprised to read: ++ # /* config.h. Generated by config.status. */ ++ configure_input='Generated from '` ++ $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' ++ `' by configure.' ++ if test x"$ac_file" != x-; then ++ configure_input="$ac_file. $configure_input" ++ { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 ++$as_echo "$as_me: creating $ac_file" >&6;} ++ fi ++ # Neutralize special characters interpreted by sed in replacement strings. 
++ case $configure_input in #( ++ *\&* | *\|* | *\\* ) ++ ac_sed_conf_input=`$as_echo "$configure_input" | ++ sed 's/[\\\\&|]/\\\\&/g'`;; #( ++ *) ac_sed_conf_input=$configure_input;; ++ esac ++ ++ case $ac_tag in ++ *:-:* | *:-) cat >"$tmp/stdin" \ ++ || as_fn_error "could not create $ac_file" "$LINENO" 5 ;; ++ esac ++ ;; ++ esac ++ ++ ac_dir=`$as_dirname -- "$ac_file" || ++$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ ++ X"$ac_file" : 'X\(//\)[^/]' \| \ ++ X"$ac_file" : 'X\(//\)$' \| \ ++ X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || ++$as_echo X"$ac_file" | ++ sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\/\)[^/].*/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\/\)$/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\).*/{ ++ s//\1/ ++ q ++ } ++ s/.*/./; q'` ++ as_dir="$ac_dir"; as_fn_mkdir_p ++ ac_builddir=. ++ ++case "$ac_dir" in ++.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; ++*) ++ ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` ++ # A ".." for each directory in $ac_dir_suffix. ++ ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` ++ case $ac_top_builddir_sub in ++ "") ac_top_builddir_sub=. ac_top_build_prefix= ;; ++ *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; ++ esac ;; ++esac ++ac_abs_top_builddir=$ac_pwd ++ac_abs_builddir=$ac_pwd$ac_dir_suffix ++# for backward compatibility: ++ac_top_builddir=$ac_top_build_prefix ++ ++case $srcdir in ++ .) # We are building in place. ++ ac_srcdir=. ++ ac_top_srcdir=$ac_top_builddir_sub ++ ac_abs_top_srcdir=$ac_pwd ;; ++ [\\/]* | ?:[\\/]* ) # Absolute name. ++ ac_srcdir=$srcdir$ac_dir_suffix; ++ ac_top_srcdir=$srcdir ++ ac_abs_top_srcdir=$srcdir ;; ++ *) # Relative name. 
++ ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ++ ac_top_srcdir=$ac_top_build_prefix$srcdir ++ ac_abs_top_srcdir=$ac_pwd/$srcdir ;; ++esac ++ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix ++ ++ ++ case $ac_mode in ++ :F) ++ # ++ # CONFIG_FILE ++ # ++ ++ case $INSTALL in ++ [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; ++ *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; ++ esac ++ ac_MKDIR_P=$MKDIR_P ++ case $MKDIR_P in ++ [\\/$]* | ?:[\\/]* ) ;; ++ */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; ++ esac ++_ACEOF ++ ++cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ++# If the template does not know about datarootdir, expand it. ++# FIXME: This hack should be removed a few years after 2.60. ++ac_datarootdir_hack=; ac_datarootdir_seen= ++ac_sed_dataroot=' ++/datarootdir/ { ++ p ++ q ++} ++/@datadir@/p ++/@docdir@/p ++/@infodir@/p ++/@localedir@/p ++/@mandir@/p' ++case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in ++*datarootdir*) ac_datarootdir_seen=yes;; ++*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 ++$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} ++_ACEOF ++cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ++ ac_datarootdir_hack=' ++ s&@datadir@&$datadir&g ++ s&@docdir@&$docdir&g ++ s&@infodir@&$infodir&g ++ s&@localedir@&$localedir&g ++ s&@mandir@&$mandir&g ++ s&\\\${datarootdir}&$datarootdir&g' ;; ++esac ++_ACEOF ++ ++# Neutralize VPATH when `$srcdir' = `.'. ++# Shell code in configure.ac might set extrasub. ++# FIXME: do we really want to maintain this feature? 
++cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ++ac_sed_extra="$ac_vpsub ++$extrasub ++_ACEOF ++cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ++:t ++/@[a-zA-Z_][a-zA-Z_0-9]*@/!b ++s|@configure_input@|$ac_sed_conf_input|;t t ++s&@top_builddir@&$ac_top_builddir_sub&;t t ++s&@top_build_prefix@&$ac_top_build_prefix&;t t ++s&@srcdir@&$ac_srcdir&;t t ++s&@abs_srcdir@&$ac_abs_srcdir&;t t ++s&@top_srcdir@&$ac_top_srcdir&;t t ++s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t ++s&@builddir@&$ac_builddir&;t t ++s&@abs_builddir@&$ac_abs_builddir&;t t ++s&@abs_top_builddir@&$ac_abs_top_builddir&;t t ++s&@INSTALL@&$ac_INSTALL&;t t ++s&@MKDIR_P@&$ac_MKDIR_P&;t t ++$ac_datarootdir_hack ++" ++eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$tmp/subs.awk" >$tmp/out \ ++ || as_fn_error "could not create $ac_file" "$LINENO" 5 ++ ++test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && ++ { ac_out=`sed -n '/\${datarootdir}/p' "$tmp/out"`; test -n "$ac_out"; } && ++ { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' "$tmp/out"`; test -z "$ac_out"; } && ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' ++which seems to be undefined. Please make sure it is defined." >&5 ++$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' ++which seems to be undefined. Please make sure it is defined." 
>&2;} ++ ++ rm -f "$tmp/stdin" ++ case $ac_file in ++ -) cat "$tmp/out" && rm -f "$tmp/out";; ++ *) rm -f "$ac_file" && mv "$tmp/out" "$ac_file";; ++ esac \ ++ || as_fn_error "could not create $ac_file" "$LINENO" 5 ++ ;; ++ :H) ++ # ++ # CONFIG_HEADER ++ # ++ if test x"$ac_file" != x-; then ++ { ++ $as_echo "/* $configure_input */" \ ++ && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs" ++ } >"$tmp/config.h" \ ++ || as_fn_error "could not create $ac_file" "$LINENO" 5 ++ if diff "$ac_file" "$tmp/config.h" >/dev/null 2>&1; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 ++$as_echo "$as_me: $ac_file is unchanged" >&6;} ++ else ++ rm -f "$ac_file" ++ mv "$tmp/config.h" "$ac_file" \ ++ || as_fn_error "could not create $ac_file" "$LINENO" 5 ++ fi ++ else ++ $as_echo "/* $configure_input */" \ ++ && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs" \ ++ || as_fn_error "could not create -" "$LINENO" 5 ++ fi ++# Compute "$ac_file"'s index in $config_headers. ++_am_arg="$ac_file" ++_am_stamp_count=1 ++for _am_header in $config_headers :; do ++ case $_am_header in ++ $_am_arg | $_am_arg:* ) ++ break ;; ++ * ) ++ _am_stamp_count=`expr $_am_stamp_count + 1` ;; ++ esac ++done ++echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || ++$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ ++ X"$_am_arg" : 'X\(//\)[^/]' \| \ ++ X"$_am_arg" : 'X\(//\)$' \| \ ++ X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null || ++$as_echo X"$_am_arg" | ++ sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\/\)[^/].*/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\/\)$/{ ++ s//\1/ ++ q ++ } ++ /^X\(\/\).*/{ ++ s//\1/ ++ q ++ } ++ s/.*/./; q'`/stamp-h$_am_stamp_count ++ ;; ++ ++ :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 ++$as_echo "$as_me: executing $ac_file commands" >&6;} ++ ;; ++ esac ++ ++ ++ case $ac_file$ac_mode in ++ "default-1":C) ++# Only add multilib support code if we just rebuilt the top-level ++# Makefile. 
++case " $CONFIG_FILES " in ++ *" Makefile "*) ++ ac_file=Makefile . ${multi_basedir}/config-ml.in ++ ;; ++esac ;; ++ "libtool":C) ++ ++ # See if we are running on zsh, and set the options which allow our ++ # commands through without removal of \ escapes. ++ if test -n "${ZSH_VERSION+set}" ; then ++ setopt NO_GLOB_SUBST ++ fi ++ ++ cfgfile="${ofile}T" ++ trap "$RM \"$cfgfile\"; exit 1" 1 2 15 ++ $RM "$cfgfile" ++ ++ cat <<_LT_EOF >> "$cfgfile" ++#! $SHELL ++ ++# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. ++# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION ++# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: ++# NOTE: Changes made to this file will be lost: look at ltmain.sh. ++# ++# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, ++# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. ++# Written by Gordon Matzigkeit, 1996 ++# ++# This file is part of GNU Libtool. ++# ++# GNU Libtool is free software; you can redistribute it and/or ++# modify it under the terms of the GNU General Public License as ++# published by the Free Software Foundation; either version 2 of ++# the License, or (at your option) any later version. ++# ++# As a special exception to the GNU General Public License, ++# if you distribute this file as part of a program or library that ++# is built using GNU Libtool, you may include this file under the ++# same distribution terms that you use for the rest of that program. ++# ++# GNU Libtool is distributed in the hope that it will be useful, ++# but WITHOUT ANY WARRANTY; without even the implied warranty of ++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++# GNU General Public License for more details. ++# ++# You should have received a copy of the GNU General Public License ++# along with GNU Libtool; see the file COPYING. 
If not, a copy ++# can be downloaded from http://www.gnu.org/licenses/gpl.html, or ++# obtained by writing to the Free Software Foundation, Inc., ++# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. ++ ++ ++# The names of the tagged configurations supported by this script. ++available_tags="" ++ ++# ### BEGIN LIBTOOL CONFIG ++ ++# Which release of libtool.m4 was used? ++macro_version=$macro_version ++macro_revision=$macro_revision ++ ++# Whether or not to build shared libraries. ++build_libtool_libs=$enable_shared ++ ++# Whether or not to build static libraries. ++build_old_libs=$enable_static ++ ++# What type of objects to build. ++pic_mode=$pic_mode ++ ++# Whether or not to optimize for fast installation. ++fast_install=$enable_fast_install ++ ++# Shell to use when invoking shell scripts. ++SHELL=$lt_SHELL ++ ++# An echo program that protects backslashes. ++ECHO=$lt_ECHO ++ ++# The host system. ++host_alias=$host_alias ++host=$host ++host_os=$host_os ++ ++# The build system. ++build_alias=$build_alias ++build=$build ++build_os=$build_os ++ ++# A sed program that does not truncate output. ++SED=$lt_SED ++ ++# Sed that helps us avoid accidentally triggering echo(1) options like -n. ++Xsed="\$SED -e 1s/^X//" ++ ++# A grep program that handles long lines. ++GREP=$lt_GREP ++ ++# An ERE matcher. ++EGREP=$lt_EGREP ++ ++# A literal string matcher. ++FGREP=$lt_FGREP ++ ++# A BSD- or MS-compatible name lister. ++NM=$lt_NM ++ ++# Whether we need soft or hard links. ++LN_S=$lt_LN_S ++ ++# What is the maximum length of a command? ++max_cmd_len=$max_cmd_len ++ ++# Object file suffix (normally "o"). ++objext=$ac_objext ++ ++# Executable file suffix (normally ""). ++exeext=$exeext ++ ++# whether the shell understands "unset". ++lt_unset=$lt_unset ++ ++# turn spaces into newlines. ++SP2NL=$lt_lt_SP2NL ++ ++# turn newlines into spaces. ++NL2SP=$lt_lt_NL2SP ++ ++# An object symbol dumper. 
++OBJDUMP=$lt_OBJDUMP ++ ++# Method to check whether dependent libraries are shared objects. ++deplibs_check_method=$lt_deplibs_check_method ++ ++# Command to use when deplibs_check_method == "file_magic". ++file_magic_cmd=$lt_file_magic_cmd ++ ++# The archiver. ++AR=$lt_AR ++AR_FLAGS=$lt_AR_FLAGS ++ ++# A symbol stripping program. ++STRIP=$lt_STRIP ++ ++# Commands used to install an old-style archive. ++RANLIB=$lt_RANLIB ++old_postinstall_cmds=$lt_old_postinstall_cmds ++old_postuninstall_cmds=$lt_old_postuninstall_cmds ++ ++# Whether to use a lock for old archive extraction. ++lock_old_archive_extraction=$lock_old_archive_extraction ++ ++# A C compiler. ++LTCC=$lt_CC ++ ++# LTCC compiler flags. ++LTCFLAGS=$lt_CFLAGS ++ ++# Take the output of nm and produce a listing of raw symbols and C names. ++global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe ++ ++# Transform the output of nm in a proper C declaration. ++global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl ++ ++# Transform the output of nm in a C name address pair. ++global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address ++ ++# Transform the output of nm in a C name address pair when lib prefix is needed. ++global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix ++ ++# The name of the directory that contains temporary libtool files. ++objdir=$objdir ++ ++# Used to examine libraries when file_magic_cmd begins with "file". ++MAGIC_CMD=$MAGIC_CMD ++ ++# Must we lock files when doing compilation? ++need_locks=$lt_need_locks ++ ++# Tool to manipulate archived DWARF debug symbol files on Mac OS X. ++DSYMUTIL=$lt_DSYMUTIL ++ ++# Tool to change global to local symbols on Mac OS X. ++NMEDIT=$lt_NMEDIT ++ ++# Tool to manipulate fat objects and archives on Mac OS X. ++LIPO=$lt_LIPO ++ ++# ldd/readelf like tool for Mach-O binaries on Mac OS X. ++OTOOL=$lt_OTOOL ++ ++# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4. 
++OTOOL64=$lt_OTOOL64 ++ ++# Old archive suffix (normally "a"). ++libext=$libext ++ ++# Shared library suffix (normally ".so"). ++shrext_cmds=$lt_shrext_cmds ++ ++# The commands to extract the exported symbol list from a shared archive. ++extract_expsyms_cmds=$lt_extract_expsyms_cmds ++ ++# Variables whose values should be saved in libtool wrapper scripts and ++# restored at link time. ++variables_saved_for_relink=$lt_variables_saved_for_relink ++ ++# Do we need the "lib" prefix for modules? ++need_lib_prefix=$need_lib_prefix ++ ++# Do we need a version for libraries? ++need_version=$need_version ++ ++# Library versioning type. ++version_type=$version_type ++ ++# Shared library runtime path variable. ++runpath_var=$runpath_var ++ ++# Shared library path variable. ++shlibpath_var=$shlibpath_var ++ ++# Is shlibpath searched before the hard-coded library search path? ++shlibpath_overrides_runpath=$shlibpath_overrides_runpath ++ ++# Format of library name prefix. ++libname_spec=$lt_libname_spec ++ ++# List of archive names. First name is the real one, the rest are links. ++# The last name is the one that the linker finds with -lNAME ++library_names_spec=$lt_library_names_spec ++ ++# The coded name of the library, if different from the real name. ++soname_spec=$lt_soname_spec ++ ++# Permission mode override for installation of shared libraries. ++install_override_mode=$lt_install_override_mode ++ ++# Command to use after installation of a shared archive. ++postinstall_cmds=$lt_postinstall_cmds ++ ++# Command to use after uninstallation of a shared archive. ++postuninstall_cmds=$lt_postuninstall_cmds ++ ++# Commands used to finish a libtool library installation in a directory. ++finish_cmds=$lt_finish_cmds ++ ++# As "finish_cmds", except a single script fragment to be evaled but ++# not shown. ++finish_eval=$lt_finish_eval ++ ++# Whether we should hardcode library paths into libraries. 
++hardcode_into_libs=$hardcode_into_libs ++ ++# Compile-time system search path for libraries. ++sys_lib_search_path_spec=$lt_sys_lib_search_path_spec ++ ++# Run-time system search path for libraries. ++sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec ++ ++# Whether dlopen is supported. ++dlopen_support=$enable_dlopen ++ ++# Whether dlopen of programs is supported. ++dlopen_self=$enable_dlopen_self ++ ++# Whether dlopen of statically linked programs is supported. ++dlopen_self_static=$enable_dlopen_self_static ++ ++# Commands to strip libraries. ++old_striplib=$lt_old_striplib ++striplib=$lt_striplib ++ ++ ++# The linker used to build libraries. ++LD=$lt_LD ++ ++# How to create reloadable object files. ++reload_flag=$lt_reload_flag ++reload_cmds=$lt_reload_cmds ++ ++# Commands used to build an old-style archive. ++old_archive_cmds=$lt_old_archive_cmds ++ ++# A language specific compiler. ++CC=$lt_compiler ++ ++# Is the compiler the GNU compiler? ++with_gcc=$GCC ++ ++# Compiler flag to turn off builtin functions. ++no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag ++ ++# How to pass a linker flag through the compiler. ++wl=$lt_lt_prog_compiler_wl ++ ++# Additional compiler flags for building library objects. ++pic_flag=$lt_lt_prog_compiler_pic ++ ++# Compiler flag to prevent dynamic linking. ++link_static_flag=$lt_lt_prog_compiler_static ++ ++# Does compiler simultaneously support -c and -o options? ++compiler_c_o=$lt_lt_cv_prog_compiler_c_o ++ ++# Whether or not to add -lc for building shared libraries. ++build_libtool_need_lc=$archive_cmds_need_lc ++ ++# Whether or not to disallow shared libs when runtime libs are static. ++allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes ++ ++# Compiler flag to allow reflexive dlopens. ++export_dynamic_flag_spec=$lt_export_dynamic_flag_spec ++ ++# Compiler flag to generate shared objects directly from archives. 
++whole_archive_flag_spec=$lt_whole_archive_flag_spec ++ ++# Whether the compiler copes with passing no objects directly. ++compiler_needs_object=$lt_compiler_needs_object ++ ++# Create an old-style archive from a shared archive. ++old_archive_from_new_cmds=$lt_old_archive_from_new_cmds ++ ++# Create a temporary old-style archive to link instead of a shared archive. ++old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds ++ ++# Commands used to build a shared archive. ++archive_cmds=$lt_archive_cmds ++archive_expsym_cmds=$lt_archive_expsym_cmds ++ ++# Commands used to build a loadable module if different from building ++# a shared archive. ++module_cmds=$lt_module_cmds ++module_expsym_cmds=$lt_module_expsym_cmds ++ ++# Whether we are building with GNU ld or not. ++with_gnu_ld=$lt_with_gnu_ld ++ ++# Flag that allows shared libraries with undefined symbols to be built. ++allow_undefined_flag=$lt_allow_undefined_flag ++ ++# Flag that enforces no undefined symbols. ++no_undefined_flag=$lt_no_undefined_flag ++ ++# Flag to hardcode \$libdir into a binary during linking. ++# This must work even if \$libdir does not exist ++hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec ++ ++# If ld is used when linking, flag to hardcode \$libdir into a binary ++# during linking. This must work even if \$libdir does not exist. ++hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld ++ ++# Whether we need a single "-rpath" flag with a separated argument. ++hardcode_libdir_separator=$lt_hardcode_libdir_separator ++ ++# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes ++# DIR into the resulting binary. ++hardcode_direct=$hardcode_direct ++ ++# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes ++# DIR into the resulting binary and the resulting library dependency is ++# "absolute",i.e impossible to change by setting \${shlibpath_var} if the ++# library is relocated. 
++hardcode_direct_absolute=$hardcode_direct_absolute ++ ++# Set to "yes" if using the -LDIR flag during linking hardcodes DIR ++# into the resulting binary. ++hardcode_minus_L=$hardcode_minus_L ++ ++# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR ++# into the resulting binary. ++hardcode_shlibpath_var=$hardcode_shlibpath_var ++ ++# Set to "yes" if building a shared library automatically hardcodes DIR ++# into the library and all subsequent libraries and executables linked ++# against it. ++hardcode_automatic=$hardcode_automatic ++ ++# Set to yes if linker adds runtime paths of dependent libraries ++# to runtime path list. ++inherit_rpath=$inherit_rpath ++ ++# Whether libtool must link a program against all its dependency libraries. ++link_all_deplibs=$link_all_deplibs ++ ++# Fix the shell variable \$srcfile for the compiler. ++fix_srcfile_path=$lt_fix_srcfile_path ++ ++# Set to "yes" if exported symbols are required. ++always_export_symbols=$always_export_symbols ++ ++# The commands to list exported symbols. ++export_symbols_cmds=$lt_export_symbols_cmds ++ ++# Symbols that should not be listed in the preloaded symbols. ++exclude_expsyms=$lt_exclude_expsyms ++ ++# Symbols that must always be exported. ++include_expsyms=$lt_include_expsyms ++ ++# Commands necessary for linking programs (against libraries) with templates. ++prelink_cmds=$lt_prelink_cmds ++ ++# Specify filename containing input files. ++file_list_spec=$lt_file_list_spec ++ ++# How to hardcode a shared library path into an executable. ++hardcode_action=$hardcode_action ++ ++# ### END LIBTOOL CONFIG ++ ++_LT_EOF ++ ++ case $host_os in ++ aix3*) ++ cat <<\_LT_EOF >> "$cfgfile" ++# AIX sometimes has problems with the GCC collect2 program. For some ++# reason, if we set the COLLECT_NAMES environment variable, the problems ++# vanish in a puff of smoke. 
++if test "X${COLLECT_NAMES+set}" != Xset; then ++ COLLECT_NAMES= ++ export COLLECT_NAMES ++fi ++_LT_EOF ++ ;; ++ esac ++ ++ ++ltmain="$ac_aux_dir/ltmain.sh" ++ ++ ++ # We use sed instead of cat because bash on DJGPP gets confused if ++ # if finds mixed CR/LF and LF-only lines. Since sed operates in ++ # text mode, it properly converts lines to CR/LF. This bash problem ++ # is reportedly fixed, but why not run on old versions too? ++ sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ ++ || (rm -f "$cfgfile"; exit 1) ++ ++ case $xsi_shell in ++ yes) ++ cat << \_LT_EOF >> "$cfgfile" ++ ++# func_dirname file append nondir_replacement ++# Compute the dirname of FILE. If nonempty, add APPEND to the result, ++# otherwise set result to NONDIR_REPLACEMENT. ++func_dirname () ++{ ++ case ${1} in ++ */*) func_dirname_result="${1%/*}${2}" ;; ++ * ) func_dirname_result="${3}" ;; ++ esac ++} ++ ++# func_basename file ++func_basename () ++{ ++ func_basename_result="${1##*/}" ++} ++ ++# func_dirname_and_basename file append nondir_replacement ++# perform func_basename and func_dirname in a single function ++# call: ++# dirname: Compute the dirname of FILE. If nonempty, ++# add APPEND to the result, otherwise set result ++# to NONDIR_REPLACEMENT. ++# value returned in "$func_dirname_result" ++# basename: Compute filename of FILE. ++# value retuned in "$func_basename_result" ++# Implementation must be kept synchronized with func_dirname ++# and func_basename. For efficiency, we do not delegate to ++# those functions but instead duplicate the functionality here. ++func_dirname_and_basename () ++{ ++ case ${1} in ++ */*) func_dirname_result="${1%/*}${2}" ;; ++ * ) func_dirname_result="${3}" ;; ++ esac ++ func_basename_result="${1##*/}" ++} ++ ++# func_stripname prefix suffix name ++# strip PREFIX and SUFFIX off of NAME. 
++# PREFIX and SUFFIX must not contain globbing or regex special ++# characters, hashes, percent signs, but SUFFIX may contain a leading ++# dot (in which case that matches only a dot). ++func_stripname () ++{ ++ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are ++ # positional parameters, so assign one to ordinary parameter first. ++ func_stripname_result=${3} ++ func_stripname_result=${func_stripname_result#"${1}"} ++ func_stripname_result=${func_stripname_result%"${2}"} ++} ++ ++# func_opt_split ++func_opt_split () ++{ ++ func_opt_split_opt=${1%%=*} ++ func_opt_split_arg=${1#*=} ++} ++ ++# func_lo2o object ++func_lo2o () ++{ ++ case ${1} in ++ *.lo) func_lo2o_result=${1%.lo}.${objext} ;; ++ *) func_lo2o_result=${1} ;; ++ esac ++} ++ ++# func_xform libobj-or-source ++func_xform () ++{ ++ func_xform_result=${1%.*}.lo ++} ++ ++# func_arith arithmetic-term... ++func_arith () ++{ ++ func_arith_result=$(( $* )) ++} ++ ++# func_len string ++# STRING may not start with a hyphen. ++func_len () ++{ ++ func_len_result=${#1} ++} ++ ++_LT_EOF ++ ;; ++ *) # Bourne compatible functions. ++ cat << \_LT_EOF >> "$cfgfile" ++ ++# func_dirname file append nondir_replacement ++# Compute the dirname of FILE. If nonempty, add APPEND to the result, ++# otherwise set result to NONDIR_REPLACEMENT. ++func_dirname () ++{ ++ # Extract subdirectory from the argument. ++ func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` ++ if test "X$func_dirname_result" = "X${1}"; then ++ func_dirname_result="${3}" ++ else ++ func_dirname_result="$func_dirname_result${2}" ++ fi ++} ++ ++# func_basename file ++func_basename () ++{ ++ func_basename_result=`$ECHO "${1}" | $SED "$basename"` ++} ++ ++ ++# func_stripname prefix suffix name ++# strip PREFIX and SUFFIX off of NAME. ++# PREFIX and SUFFIX must not contain globbing or regex special ++# characters, hashes, percent signs, but SUFFIX may contain a leading ++# dot (in which case that matches only a dot). 
++# func_strip_suffix prefix name ++func_stripname () ++{ ++ case ${2} in ++ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; ++ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; ++ esac ++} ++ ++# sed scripts: ++my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' ++my_sed_long_arg='1s/^-[^=]*=//' ++ ++# func_opt_split ++func_opt_split () ++{ ++ func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` ++ func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` ++} ++ ++# func_lo2o object ++func_lo2o () ++{ ++ func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` ++} ++ ++# func_xform libobj-or-source ++func_xform () ++{ ++ func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` ++} ++ ++# func_arith arithmetic-term... ++func_arith () ++{ ++ func_arith_result=`expr "$@"` ++} ++ ++# func_len string ++# STRING may not start with a hyphen. ++func_len () ++{ ++ func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` ++} ++ ++_LT_EOF ++esac ++ ++case $lt_shell_append in ++ yes) ++ cat << \_LT_EOF >> "$cfgfile" ++ ++# func_append var value ++# Append VALUE to the end of shell variable VAR. ++func_append () ++{ ++ eval "$1+=\$2" ++} ++_LT_EOF ++ ;; ++ *) ++ cat << \_LT_EOF >> "$cfgfile" ++ ++# func_append var value ++# Append VALUE to the end of shell variable VAR. 
++func_append () ++{ ++ eval "$1=\$$1\$2" ++} ++ ++_LT_EOF ++ ;; ++ esac ++ ++ ++ sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ ++ || (rm -f "$cfgfile"; exit 1) ++ ++ mv -f "$cfgfile" "$ofile" || ++ (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") ++ chmod +x "$ofile" ++ ++ ;; ++ "gstdint.h":C) ++if test "$GCC" = yes; then ++ echo "/* generated for " `$CC --version | sed 1q` "*/" > tmp-stdint.h ++else ++ echo "/* generated for $CC */" > tmp-stdint.h ++fi ++ ++sed 's/^ *//' >> tmp-stdint.h < ++EOF ++ ++if test "$acx_cv_header_stdint" != stdint.h; then ++ echo "#include " >> tmp-stdint.h ++fi ++if test "$acx_cv_header_stdint" != stddef.h; then ++ echo "#include <$acx_cv_header_stdint>" >> tmp-stdint.h ++fi ++ ++sed 's/^ *//' >> tmp-stdint.h <> tmp-stdint.h <> tmp-stdint.h <> tmp-stdint.h <> tmp-stdint.h <> tmp-stdint.h <> tmp-stdint.h <> tmp-stdint.h <= 199901L ++ #ifndef _INT64_T ++ #define _INT64_T ++ #ifndef __int64_t_defined ++ #ifndef int64_t ++ typedef long long int64_t; ++ #endif ++ #endif ++ #endif ++ #ifndef _UINT64_T ++ #define _UINT64_T ++ #ifndef uint64_t ++ typedef unsigned long long uint64_t; ++ #endif ++ #endif ++ ++ #elif defined __GNUC__ && defined (__STDC__) && __STDC__-0 ++ /* NextStep 2.0 cc is really gcc 1.93 but it defines __GNUC__ = 2 and ++ does not implement __extension__. But that compiler doesn't define ++ __GNUC_MINOR__. 
*/ ++ # if __GNUC__ < 2 || (__NeXT__ && !__GNUC_MINOR__) ++ # define __extension__ ++ # endif ++ ++ # ifndef _INT64_T ++ # define _INT64_T ++ # ifndef int64_t ++ __extension__ typedef long long int64_t; ++ # endif ++ # endif ++ # ifndef _UINT64_T ++ # define _UINT64_T ++ # ifndef uint64_t ++ __extension__ typedef unsigned long long uint64_t; ++ # endif ++ # endif ++ ++ #elif !defined __STRICT_ANSI__ ++ # if defined _MSC_VER || defined __WATCOMC__ || defined __BORLANDC__ ++ ++ # ifndef _INT64_T ++ # define _INT64_T ++ # ifndef int64_t ++ typedef __int64 int64_t; ++ # endif ++ # endif ++ # ifndef _UINT64_T ++ # define _UINT64_T ++ # ifndef uint64_t ++ typedef unsigned __int64 uint64_t; ++ # endif ++ # endif ++ # endif /* compiler */ ++ ++ #endif /* ANSI version */ ++EOF ++fi ++ ++# ------------- done int64_t types, emit intptr types ------------ ++if test "$ac_cv_type_uintptr_t" != yes; then ++ sed 's/^ *//' >> tmp-stdint.h <> tmp-stdint.h <> tmp-stdint.h <> tmp-stdint.h <> tmp-stdint.h < ++ # srcdir/Makefile.am -> srcdir/{src,libsupc++,...}/Makefile.am, manually ++ # append it here. Only modify Makefiles that have just been created. ++ # ++ # Also, get rid of this simulated-VPATH thing that automake does. ++ cat > vpsed << \_EOF ++ s!`test -f '$<' || echo '$(srcdir)/'`!! ++_EOF ++ for i in $SUBDIRS; do ++ case $CONFIG_FILES in ++ *${i}/Makefile*) ++ #echo "Adding MULTISUBDIR to $i/Makefile" ++ sed -f vpsed $i/Makefile > tmp ++ grep '^MULTISUBDIR =' Makefile >> tmp ++ mv tmp $i/Makefile ++ ;; ++ esac ++ done ++ rm vpsed ++ fi ++ fi ++ ;; ++ ++ esac ++done # for ac_tag ++ ++ ++as_fn_exit 0 ++_ACEOF ++ac_clean_files=$ac_clean_files_save ++ ++test $ac_write_fail = 0 || ++ as_fn_error "write failure creating $CONFIG_STATUS" "$LINENO" 5 ++ ++ ++# configure is writing to config.log, and then calls config.status. ++# config.status does its own redirection, appending to config.log. 
++# Unfortunately, on DOS this fails, as config.log is still kept open ++# by configure, so config.status won't be able to write to it; its ++# output is simply discarded. So we exec the FD to /dev/null, ++# effectively closing config.log, so it can be properly (re)opened and ++# appended to by config.status. When coming back to configure, we ++# need to make the FD available again. ++if test "$no_create" != yes; then ++ ac_cs_success=: ++ ac_config_status_args= ++ test "$silent" = yes && ++ ac_config_status_args="$ac_config_status_args --quiet" ++ exec 5>/dev/null ++ $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false ++ exec 5>>config.log ++ # Use ||, not &&, to avoid exiting from the if with $? = 1, which ++ # would make configure fail if this is the last instruction. ++ $ac_cs_success || as_fn_exit $? ++fi ++if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 ++$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} ++fi ++ diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/configure.ac index 000000000,000000000..2b774b058 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/configure.ac @@@ -1,0 -1,0 +1,409 @@@ ++# configure.ac -- Backtrace configure script. ++# Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ ++# Redistribution and use in source and binary forms, with or without ++# modification, are permitted provided that the following conditions are ++# met: ++ ++# (1) Redistributions of source code must retain the above copyright ++# notice, this list of conditions and the following disclaimer. ++ ++# (2) Redistributions in binary form must reproduce the above copyright ++# notice, this list of conditions and the following disclaimer in ++# the documentation and/or other materials provided with the ++# distribution. 
++ ++# (3) The name of the author may not be used to ++# endorse or promote products derived from this software without ++# specific prior written permission. ++ ++# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++# POSSIBILITY OF SUCH DAMAGE. ++ ++AC_PREREQ(2.64) ++AC_INIT(package-unused, version-unused,, libbacktrace) ++AC_CONFIG_SRCDIR(backtrace.h) ++AC_CONFIG_HEADER(config.h) ++ ++if test -n "${with_target_subdir}"; then ++ AM_ENABLE_MULTILIB(, ..) ++fi ++ ++AC_CANONICAL_SYSTEM ++target_alias=${target_alias-$host_alias} ++ ++AC_USE_SYSTEM_EXTENSIONS ++ ++libtool_VERSION=1:0:0 ++AC_SUBST(libtool_VERSION) ++ ++# 1.11.1: Require that version of automake. ++# foreign: Don't require README, INSTALL, NEWS, etc. ++# no-define: Don't define PACKAGE and VERSION. ++# no-dependencies: Don't generate automatic dependencies. ++# (because it breaks when using bootstrap-lean, since some of the ++# headers are gone at "make install" time). ++# -Wall: Issue all automake warnings. ++# -Wno-portability: Don't warn about constructs supported by GNU make. ++# (because GCC requires GNU make anyhow). 
++AM_INIT_AUTOMAKE([1.11.1 foreign no-dist no-define no-dependencies -Wall -Wno-portability]) ++ ++AM_MAINTAINER_MODE ++ ++AC_ARG_WITH(target-subdir, ++[ --with-target-subdir=SUBDIR Configuring in a subdirectory for target]) ++ ++# We must force CC to /not/ be precious variables; otherwise ++# the wrong, non-multilib-adjusted value will be used in multilibs. ++# As a side effect, we have to subst CFLAGS ourselves. ++m4_rename([_AC_ARG_VAR_PRECIOUS],[backtrace_PRECIOUS]) ++m4_define([_AC_ARG_VAR_PRECIOUS],[]) ++AC_PROG_CC ++m4_rename_force([backtrace_PRECIOUS],[_AC_ARG_VAR_PRECIOUS]) ++ ++AC_SUBST(CFLAGS) ++ ++AC_PROG_RANLIB ++ ++AC_PROG_AWK ++case "$AWK" in ++"") AC_MSG_ERROR([can't build without awk]) ;; ++esac ++ ++LT_INIT ++AM_PROG_LIBTOOL ++ ++backtrace_supported=yes ++ ++if test -n "${with_target_subdir}"; then ++ # We are compiling a GCC library. We can assume that the unwind ++ # library exists. ++ BACKTRACE_FILE="backtrace.lo simple.lo" ++else ++ AC_CHECK_HEADER([unwind.h], ++ [AC_CHECK_FUNC([_Unwind_Backtrace], ++ [BACKTRACE_FILE="backtrace.lo simple.lo"], ++ [BACKTRACE_FILE="nounwind.lo" ++ backtrace_supported=no])], ++ [BACKTRACE_FILE="nounwind.lo" ++ backtrace_supported=no]) ++fi ++AC_SUBST(BACKTRACE_FILE) ++ ++EXTRA_FLAGS= ++if test -n "${with_target_subdir}"; then ++ EXTRA_FLAGS="-funwind-tables -frandom-seed=\$@" ++else ++ AC_CACHE_CHECK([for -funwind-tables option], ++ [libbacktrace_cv_c_unwind_tables], ++ [CFLAGS_hold="$CFLAGS" ++ CFLAGS="$CFLAGS -funwind-tables" ++ AC_COMPILE_IFELSE( ++ [AC_LANG_PROGRAM([static int f() { return 0; }], [return f();])], ++ [libbacktrace_cv_c_unwind_tables=yes], ++ [libbacktrace_cv_c_unwind_tables=no]) ++ CFLAGS="$CFLAGS_hold"]) ++ if test "$libbacktrace_cv_c_unwind_tables" = "yes"; then ++ EXTRA_FLAGS=-funwind-tables ++ fi ++ AC_CACHE_CHECK([for -frandom-seed=string option], ++ [libbacktrace_cv_c_random_seed_string], ++ [CFLAGS_hold="$CFLAGS" ++ CFLAGS="$CFLAGS -frandom-seed=conftest.lo" ++ AC_COMPILE_IFELSE( ++ 
[AC_LANG_PROGRAM([], [return 0;])], ++ [libbacktrace_cv_c_random_seed_string=yes], ++ [libbacktrace_cv_c_random_seed_string=no]) ++ CFLAGS="$CFLAGS_hold"]) ++ if test "$libbacktrace_cv_c_random_seed_string" = "yes"; then ++ EXTRA_FLAGS="$EXTRA_FLAGS -frandom-seed=\$@" ++ fi ++fi ++AC_SUBST(EXTRA_FLAGS) ++ ++ACX_PROG_CC_WARNING_OPTS([-W -Wall -Wwrite-strings -Wstrict-prototypes \ ++ -Wmissing-prototypes -Wold-style-definition \ ++ -Wmissing-format-attribute -Wcast-qual], ++ [WARN_FLAGS]) ++ ++if test -n "${with_target_subdir}"; then ++ WARN_FLAGS="$WARN_FLAGS -Werror" ++fi ++ ++AC_SUBST(WARN_FLAGS) ++ ++if test -n "${with_target_subdir}"; then ++ GCC_CHECK_UNWIND_GETIPINFO ++else ++ ac_save_CFFLAGS="$CFLAGS" ++ CFLAGS="$CFLAGS -Werror-implicit-function-declaration" ++ AC_MSG_CHECKING([for _Unwind_GetIPInfo]) ++ AC_LINK_IFELSE( ++ [AC_LANG_PROGRAM( ++ [#include "unwind.h" ++ struct _Unwind_Context *context; ++ int ip_before_insn = 0;], ++ [return _Unwind_GetIPInfo (context, &ip_before_insn);])], ++ [have_unwind_getipinfo=yes], [have_unwind_getipinfo=no]) ++ CFLAGS="$ac_save_CFLAGS" ++ AC_MSG_RESULT([$have_unwind_getipinfo]) ++ if test "$have_unwind_getipinfo" = "yes"; then ++ AC_DEFINE(HAVE_GETIPINFO, 1, [Define if _Unwind_GetIPInfo is available.]) ++ fi ++fi ++ ++# Enable --enable-host-shared. ++AC_ARG_ENABLE(host-shared, ++[AS_HELP_STRING([--enable-host-shared], ++ [build host code as shared libraries])], ++[PIC_FLAG=-fPIC], [PIC_FLAG=]) ++AC_SUBST(PIC_FLAG) ++ ++# Test for __sync support. 
++AC_CACHE_CHECK([__sync extensions], ++[libbacktrace_cv_sys_sync], ++[if test -n "${with_target_subdir}"; then ++ case "${host}" in ++ hppa*-*-hpux*) libbacktrace_cv_sys_sync=no ;; ++ *) libbacktrace_cv_sys_sync=yes ;; ++ esac ++ else ++ AC_LINK_IFELSE( ++ [AC_LANG_PROGRAM([int i;], ++ [__sync_bool_compare_and_swap (&i, i, i); ++ __sync_lock_test_and_set (&i, 1); ++ __sync_lock_release (&i);])], ++ [libbacktrace_cv_sys_sync=yes], ++ [libbacktrace_cv_sys_sync=no]) ++ fi]) ++BACKTRACE_SUPPORTS_THREADS=0 ++if test "$libbacktrace_cv_sys_sync" = "yes"; then ++ BACKTRACE_SUPPORTS_THREADS=1 ++ AC_DEFINE([HAVE_SYNC_FUNCTIONS], 1, ++ [Define to 1 if you have the __sync functions]) ++fi ++AC_SUBST(BACKTRACE_SUPPORTS_THREADS) ++ ++# Test for __atomic support. ++AC_CACHE_CHECK([__atomic extensions], ++[libbacktrace_cv_sys_atomic], ++[if test -n "${with_target_subdir}"; then ++ libbacktrace_cv_sys_atomic=yes ++ else ++ AC_LINK_IFELSE( ++ [AC_LANG_PROGRAM([int i;], ++ [__atomic_load_n (&i, __ATOMIC_ACQUIRE); ++ __atomic_store_n (&i, 1, __ATOMIC_RELEASE);])], ++ [libbacktrace_cv_sys_atomic=yes], ++ [libbacktrace_cv_sys_atomic=no]) ++ fi]) ++if test "$libbacktrace_cv_sys_atomic" = "yes"; then ++ AC_DEFINE([HAVE_ATOMIC_FUNCTIONS], 1, ++ [Define to 1 if you have the __atomic functions]) ++fi ++ ++# The library needs to be able to read the executable itself. Compile ++# a file to determine the executable format. The awk script ++# filetype.awk prints out the file type. ++AC_CACHE_CHECK([output filetype], ++[libbacktrace_cv_sys_filetype], ++[filetype= ++AC_COMPILE_IFELSE( ++ [AC_LANG_PROGRAM([int i;], [int j;])], ++ [filetype=`${AWK} -f $srcdir/filetype.awk conftest.$ac_objext`], ++ [AC_MSG_FAILURE([compiler failed])]) ++libbacktrace_cv_sys_filetype=$filetype]) ++ ++# Match the file type to decide what files to compile. 
++FORMAT_FILE= ++backtrace_supports_data=yes ++case "$libbacktrace_cv_sys_filetype" in ++elf*) FORMAT_FILE="elf.lo" ;; ++pecoff) FORMAT_FILE="pecoff.lo" ++ backtrace_supports_data=no ++ ;; ++*) AC_MSG_WARN([could not determine output file type]) ++ FORMAT_FILE="unknown.lo" ++ backtrace_supported=no ++ ;; ++esac ++AC_SUBST(FORMAT_FILE) ++ ++# ELF defines. ++elfsize= ++case "$libbacktrace_cv_sys_filetype" in ++elf32) elfsize=32 ;; ++elf64) elfsize=64 ;; ++*) elfsize=unused ++esac ++AC_DEFINE_UNQUOTED([BACKTRACE_ELF_SIZE], [$elfsize], [ELF size: 32 or 64]) ++ ++BACKTRACE_SUPPORTED=0 ++if test "$backtrace_supported" = "yes"; then ++ BACKTRACE_SUPPORTED=1 ++fi ++AC_SUBST(BACKTRACE_SUPPORTED) ++ ++BACKTRACE_SUPPORTS_DATA=0 ++if test "$backtrace_supports_data" = "yes"; then ++ BACKTRACE_SUPPORTS_DATA=1 ++fi ++AC_SUBST(BACKTRACE_SUPPORTS_DATA) ++ ++GCC_HEADER_STDINT(gstdint.h) ++ ++AC_CHECK_HEADERS(sys/mman.h) ++if test "$ac_cv_header_sys_mman_h" = "no"; then ++ have_mmap=no ++else ++ if test -n "${with_target_subdir}"; then ++ # When built as a GCC target library, we can't do a link test. We ++ # simply assume that if we have mman.h, we have mmap. ++ have_mmap=yes ++ case "${host}" in ++ spu-*-*|*-*-msdosdjgpp) ++ # The SPU does not have mmap, but it has a sys/mman.h header file ++ # containing "mmap_eaddr" and the mmap flags, confusing the test. 
++ # DJGPP also has sys/man.h, but no mmap ++ have_mmap=no ;; ++ esac ++ else ++ AC_CHECK_FUNC(mmap, [have_mmap=yes], [have_mmap=no]) ++ fi ++fi ++if test "$have_mmap" = "no"; then ++ VIEW_FILE=read.lo ++ ALLOC_FILE=alloc.lo ++else ++ VIEW_FILE=mmapio.lo ++ AC_PREPROC_IFELSE([ ++#include ++#if !defined(MAP_ANONYMOUS) && !defined(MAP_ANON) ++ #error no MAP_ANONYMOUS ++#endif ++], [ALLOC_FILE=mmap.lo], [ALLOC_FILE=alloc.lo]) ++fi ++AC_SUBST(VIEW_FILE) ++AC_SUBST(ALLOC_FILE) ++ ++BACKTRACE_USES_MALLOC=0 ++if test "$ALLOC_FILE" = "alloc.lo"; then ++ BACKTRACE_USES_MALLOC=1 ++fi ++AC_SUBST(BACKTRACE_USES_MALLOC) ++ ++# Check for dl_iterate_phdr. ++AC_CHECK_HEADERS(link.h) ++if test "$ac_cv_header_link_h" = "no"; then ++ have_dl_iterate_phdr=no ++else ++ if test -n "${with_target_subdir}"; then ++ # When built as a GCC target library, we can't do a link test. ++ AC_EGREP_HEADER([dl_iterate_phdr], [link.h], [have_dl_iterate_phdr=yes], ++ [have_dl_iterate_phdr=no]) ++ case "${host}" in ++ *-*-solaris2.10*) ++ # Avoid dl_iterate_phdr on Solaris 10, where it is in the ++ # header file but is only in -ldl. ++ have_dl_iterate_phdr=no ;; ++ esac ++ else ++ AC_CHECK_FUNC([dl_iterate_phdr], [have_dl_iterate_phdr=yes], ++ [have_dl_iterate_phdr=no]) ++ fi ++fi ++if test "$have_dl_iterate_phdr" = "yes"; then ++ AC_DEFINE(HAVE_DL_ITERATE_PHDR, 1, [Define if dl_iterate_phdr is available.]) ++fi ++ ++# Check for the fcntl function. ++if test -n "${with_target_subdir}"; then ++ case "${host}" in ++ *-*-mingw*) have_fcntl=no ;; ++ spu-*-*) have_fcntl=no ;; ++ *) have_fcntl=yes ;; ++ esac ++else ++ AC_CHECK_FUNC(fcntl, [have_fcntl=yes], [have_fcntl=no]) ++fi ++if test "$have_fcntl" = "yes"; then ++ AC_DEFINE([HAVE_FCNTL], 1, ++ [Define to 1 if you have the fcntl function]) ++fi ++ ++AC_CHECK_DECLS(strnlen) ++ ++# Check for getexecname function. 
++if test -n "${with_target_subdir}"; then ++ case "${host}" in ++ *-*-solaris2*) have_getexecname=yes ;; ++ *) have_getexecname=no ;; ++ esac ++else ++ AC_CHECK_FUNC(getexecname, [have_getexecname=yes], [have_getexecname=no]) ++fi ++if test "$have_getexecname" = "yes"; then ++ AC_DEFINE(HAVE_GETEXECNAME, 1, [Define if getexecname is available.]) ++fi ++ ++AC_CACHE_CHECK([whether tests can run], ++ [libbacktrace_cv_sys_native], ++ [AC_RUN_IFELSE([AC_LANG_PROGRAM([], [return 0;])], ++ [libbacktrace_cv_sys_native=yes], ++ [libbacktrace_cv_sys_native=no], ++ [libbacktrace_cv_sys_native=no])]) ++AM_CONDITIONAL(NATIVE, test "$libbacktrace_cv_sys_native" = "yes") ++ ++if test "${multilib}" = "yes"; then ++ multilib_arg="--enable-multilib" ++else ++ multilib_arg= ++fi ++ ++AC_CONFIG_FILES(Makefile backtrace-supported.h) ++ ++# We need multilib support, but only if configuring for the target. ++AC_CONFIG_COMMANDS([default], ++[if test -n "$CONFIG_FILES"; then ++ if test -n "${with_target_subdir}"; then ++ # Multilibs need MULTISUBDIR defined correctly in certain makefiles so ++ # that multilib installs will end up installed in the correct place. ++ # The testsuite needs it for multilib-aware ABI baseline files. ++ # To work around this not being passed down from config-ml.in -> ++ # srcdir/Makefile.am -> srcdir/{src,libsupc++,...}/Makefile.am, manually ++ # append it here. Only modify Makefiles that have just been created. ++ # ++ # Also, get rid of this simulated-VPATH thing that automake does. ++ cat > vpsed << \_EOF ++ s!`test -f '$<' || echo '$(srcdir)/'`!! ++_EOF ++ for i in $SUBDIRS; do ++ case $CONFIG_FILES in ++ *${i}/Makefile*) ++ #echo "Adding MULTISUBDIR to $i/Makefile" ++ sed -f vpsed $i/Makefile > tmp ++ grep '^MULTISUBDIR =' Makefile >> tmp ++ mv tmp $i/Makefile ++ ;; ++ esac ++ done ++ rm vpsed ++ fi ++ fi ++], ++[ ++# Variables needed in config.status (file generation) which aren't already ++# passed by autoconf. 
++SUBDIRS="$SUBDIRS" ++]) ++ ++AC_OUTPUT diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/dwarf.c index 000000000,000000000..37682c003 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/dwarf.c @@@ -1,0 -1,0 +1,3038 @@@ ++/* dwarf.c -- Get file/line information from DWARF for backtraces. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. 
*/ ++ ++#include "config.h" ++ ++#include ++#include ++#include ++#include ++ ++#include "dwarf2.h" ++#include "filenames.h" ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++#if !defined(HAVE_DECL_STRNLEN) || !HAVE_DECL_STRNLEN ++ ++/* If strnlen is not declared, provide our own version. */ ++ ++static size_t ++xstrnlen (const char *s, size_t maxlen) ++{ ++ size_t i; ++ ++ for (i = 0; i < maxlen; ++i) ++ if (s[i] == '\0') ++ break; ++ return i; ++} ++ ++#define strnlen xstrnlen ++ ++#endif ++ ++/* A buffer to read DWARF info. */ ++ ++struct dwarf_buf ++{ ++ /* Buffer name for error messages. */ ++ const char *name; ++ /* Start of the buffer. */ ++ const unsigned char *start; ++ /* Next byte to read. */ ++ const unsigned char *buf; ++ /* The number of bytes remaining. */ ++ size_t left; ++ /* Whether the data is big-endian. */ ++ int is_bigendian; ++ /* Error callback routine. */ ++ backtrace_error_callback error_callback; ++ /* Data for error_callback. */ ++ void *data; ++ /* Non-zero if we've reported an underflow error. */ ++ int reported_underflow; ++}; ++ ++/* A single attribute in a DWARF abbreviation. */ ++ ++struct attr ++{ ++ /* The attribute name. */ ++ enum dwarf_attribute name; ++ /* The attribute form. */ ++ enum dwarf_form form; ++}; ++ ++/* A single DWARF abbreviation. */ ++ ++struct abbrev ++{ ++ /* The abbrev code--the number used to refer to the abbrev. */ ++ uint64_t code; ++ /* The entry tag. */ ++ enum dwarf_tag tag; ++ /* Non-zero if this abbrev has child entries. */ ++ int has_children; ++ /* The number of attributes. */ ++ size_t num_attrs; ++ /* The attributes. */ ++ struct attr *attrs; ++}; ++ ++/* The DWARF abbreviations for a compilation unit. This structure ++ only exists while reading the compilation unit. Most DWARF readers ++ seem to a hash table to map abbrev ID's to abbrev entries. ++ However, we primarily care about GCC, and GCC simply issues ID's in ++ numerical order starting at 1. 
So we simply keep a sorted vector, ++ and try to just look up the code. */ ++ ++struct abbrevs ++{ ++ /* The number of abbrevs in the vector. */ ++ size_t num_abbrevs; ++ /* The abbrevs, sorted by the code field. */ ++ struct abbrev *abbrevs; ++}; ++ ++/* The different kinds of attribute values. */ ++ ++enum attr_val_encoding ++{ ++ /* An address. */ ++ ATTR_VAL_ADDRESS, ++ /* A unsigned integer. */ ++ ATTR_VAL_UINT, ++ /* A sigd integer. */ ++ ATTR_VAL_SINT, ++ /* A string. */ ++ ATTR_VAL_STRING, ++ /* An offset to other data in the containing unit. */ ++ ATTR_VAL_REF_UNIT, ++ /* An offset to other data within the .dwarf_info section. */ ++ ATTR_VAL_REF_INFO, ++ /* An offset to data in some other section. */ ++ ATTR_VAL_REF_SECTION, ++ /* A type signature. */ ++ ATTR_VAL_REF_TYPE, ++ /* A block of data (not represented). */ ++ ATTR_VAL_BLOCK, ++ /* An expression (not represented). */ ++ ATTR_VAL_EXPR, ++}; ++ ++/* An attribute value. */ ++ ++struct attr_val ++{ ++ /* How the value is stored in the field u. */ ++ enum attr_val_encoding encoding; ++ union ++ { ++ /* ATTR_VAL_ADDRESS, ATTR_VAL_UINT, ATTR_VAL_REF*. */ ++ uint64_t uint; ++ /* ATTR_VAL_SINT. */ ++ int64_t sint; ++ /* ATTR_VAL_STRING. */ ++ const char *string; ++ /* ATTR_VAL_BLOCK not stored. */ ++ } u; ++}; ++ ++/* The line number program header. */ ++ ++struct line_header ++{ ++ /* The version of the line number information. */ ++ int version; ++ /* The minimum instruction length. */ ++ unsigned int min_insn_len; ++ /* The maximum number of ops per instruction. */ ++ unsigned int max_ops_per_insn; ++ /* The line base for special opcodes. */ ++ int line_base; ++ /* The line range for special opcodes. */ ++ unsigned int line_range; ++ /* The opcode base--the first special opcode. */ ++ unsigned int opcode_base; ++ /* Opcode lengths, indexed by opcode - 1. */ ++ const unsigned char *opcode_lengths; ++ /* The number of directory entries. */ ++ size_t dirs_count; ++ /* The directory entries. 
*/ ++ const char **dirs; ++ /* The number of filenames. */ ++ size_t filenames_count; ++ /* The filenames. */ ++ const char **filenames; ++}; ++ ++/* Map a single PC value to a file/line. We will keep a vector of ++ these sorted by PC value. Each file/line will be correct from the ++ PC up to the PC of the next entry if there is one. We allocate one ++ extra entry at the end so that we can use bsearch. */ ++ ++struct line ++{ ++ /* PC. */ ++ uintptr_t pc; ++ /* File name. Many entries in the array are expected to point to ++ the same file name. */ ++ const char *filename; ++ /* Line number. */ ++ int lineno; ++ /* Index of the object in the original array read from the DWARF ++ section, before it has been sorted. The index makes it possible ++ to use Quicksort and maintain stability. */ ++ int idx; ++}; ++ ++/* A growable vector of line number information. This is used while ++ reading the line numbers. */ ++ ++struct line_vector ++{ ++ /* Memory. This is an array of struct line. */ ++ struct backtrace_vector vec; ++ /* Number of valid mappings. */ ++ size_t count; ++}; ++ ++/* A function described in the debug info. */ ++ ++struct function ++{ ++ /* The name of the function. */ ++ const char *name; ++ /* If this is an inlined function, the filename of the call ++ site. */ ++ const char *caller_filename; ++ /* If this is an inlined function, the line number of the call ++ site. */ ++ int caller_lineno; ++ /* Map PC ranges to inlined functions. */ ++ struct function_addrs *function_addrs; ++ size_t function_addrs_count; ++}; ++ ++/* An address range for a function. This maps a PC value to a ++ specific function. */ ++ ++struct function_addrs ++{ ++ /* Range is LOW <= PC < HIGH. */ ++ uint64_t low; ++ uint64_t high; ++ /* Function for this address range. */ ++ struct function *function; ++}; ++ ++/* A growable vector of function address ranges. */ ++ ++struct function_vector ++{ ++ /* Memory. This is an array of struct function_addrs. 
*/ ++ struct backtrace_vector vec; ++ /* Number of address ranges present. */ ++ size_t count; ++}; ++ ++/* A DWARF compilation unit. This only holds the information we need ++ to map a PC to a file and line. */ ++ ++struct unit ++{ ++ /* The first entry for this compilation unit. */ ++ const unsigned char *unit_data; ++ /* The length of the data for this compilation unit. */ ++ size_t unit_data_len; ++ /* The offset of UNIT_DATA from the start of the information for ++ this compilation unit. */ ++ size_t unit_data_offset; ++ /* DWARF version. */ ++ int version; ++ /* Whether unit is DWARF64. */ ++ int is_dwarf64; ++ /* Address size. */ ++ int addrsize; ++ /* Offset into line number information. */ ++ off_t lineoff; ++ /* Primary source file. */ ++ const char *filename; ++ /* Compilation command working directory. */ ++ const char *comp_dir; ++ /* Absolute file name, only set if needed. */ ++ const char *abs_filename; ++ /* The abbreviations for this unit. */ ++ struct abbrevs abbrevs; ++ ++ /* The fields above this point are read in during initialization and ++ may be accessed freely. The fields below this point are read in ++ as needed, and therefore require care, as different threads may ++ try to initialize them simultaneously. */ ++ ++ /* PC to line number mapping. This is NULL if the values have not ++ been read. This is (struct line *) -1 if there was an error ++ reading the values. */ ++ struct line *lines; ++ /* Number of entries in lines. */ ++ size_t lines_count; ++ /* PC ranges to function. */ ++ struct function_addrs *function_addrs; ++ size_t function_addrs_count; ++}; ++ ++/* An address range for a compilation unit. This maps a PC value to a ++ specific compilation unit. Note that we invert the representation ++ in DWARF: instead of listing the units and attaching a list of ++ ranges, we list the ranges and have each one point to the unit. ++ This lets us do a binary search to find the unit. 
*/ ++ ++struct unit_addrs ++{ ++ /* Range is LOW <= PC < HIGH. */ ++ uint64_t low; ++ uint64_t high; ++ /* Compilation unit for this address range. */ ++ struct unit *u; ++}; ++ ++/* A growable vector of compilation unit address ranges. */ ++ ++struct unit_addrs_vector ++{ ++ /* Memory. This is an array of struct unit_addrs. */ ++ struct backtrace_vector vec; ++ /* Number of address ranges present. */ ++ size_t count; ++}; ++ ++/* The information we need to map a PC to a file and line. */ ++ ++struct dwarf_data ++{ ++ /* The data for the next file we know about. */ ++ struct dwarf_data *next; ++ /* The base address for this file. */ ++ uintptr_t base_address; ++ /* A sorted list of address ranges. */ ++ struct unit_addrs *addrs; ++ /* Number of address ranges in list. */ ++ size_t addrs_count; ++ /* The unparsed .debug_info section. */ ++ const unsigned char *dwarf_info; ++ size_t dwarf_info_size; ++ /* The unparsed .debug_line section. */ ++ const unsigned char *dwarf_line; ++ size_t dwarf_line_size; ++ /* The unparsed .debug_ranges section. */ ++ const unsigned char *dwarf_ranges; ++ size_t dwarf_ranges_size; ++ /* The unparsed .debug_str section. */ ++ const unsigned char *dwarf_str; ++ size_t dwarf_str_size; ++ /* Whether the data is big-endian or not. */ ++ int is_bigendian; ++ /* A vector used for function addresses. We keep this here so that ++ we can grow the vector as we read more functions. */ ++ struct function_vector fvec; ++}; ++ ++/* Report an error for a DWARF buffer. */ ++ ++static void ++dwarf_buf_error (struct dwarf_buf *buf, const char *msg) ++{ ++ char b[200]; ++ ++ snprintf (b, sizeof b, "%s in %s at %d", ++ msg, buf->name, (int) (buf->buf - buf->start)); ++ buf->error_callback (buf->data, b, 0); ++} ++ ++/* Require at least COUNT bytes in BUF. Return 1 if all is well, 0 on ++ error. 
*/ ++ ++static int ++require (struct dwarf_buf *buf, size_t count) ++{ ++ if (buf->left >= count) ++ return 1; ++ ++ if (!buf->reported_underflow) ++ { ++ dwarf_buf_error (buf, "DWARF underflow"); ++ buf->reported_underflow = 1; ++ } ++ ++ return 0; ++} ++ ++/* Advance COUNT bytes in BUF. Return 1 if all is well, 0 on ++ error. */ ++ ++static int ++advance (struct dwarf_buf *buf, size_t count) ++{ ++ if (!require (buf, count)) ++ return 0; ++ buf->buf += count; ++ buf->left -= count; ++ return 1; ++} ++ ++/* Read one byte from BUF and advance 1 byte. */ ++ ++static unsigned char ++read_byte (struct dwarf_buf *buf) ++{ ++ const unsigned char *p = buf->buf; ++ ++ if (!advance (buf, 1)) ++ return 0; ++ return p[0]; ++} ++ ++/* Read a signed char from BUF and advance 1 byte. */ ++ ++static signed char ++read_sbyte (struct dwarf_buf *buf) ++{ ++ const unsigned char *p = buf->buf; ++ ++ if (!advance (buf, 1)) ++ return 0; ++ return (*p ^ 0x80) - 0x80; ++} ++ ++/* Read a uint16 from BUF and advance 2 bytes. */ ++ ++static uint16_t ++read_uint16 (struct dwarf_buf *buf) ++{ ++ const unsigned char *p = buf->buf; ++ ++ if (!advance (buf, 2)) ++ return 0; ++ if (buf->is_bigendian) ++ return ((uint16_t) p[0] << 8) | (uint16_t) p[1]; ++ else ++ return ((uint16_t) p[1] << 8) | (uint16_t) p[0]; ++} ++ ++/* Read a uint32 from BUF and advance 4 bytes. */ ++ ++static uint32_t ++read_uint32 (struct dwarf_buf *buf) ++{ ++ const unsigned char *p = buf->buf; ++ ++ if (!advance (buf, 4)) ++ return 0; ++ if (buf->is_bigendian) ++ return (((uint32_t) p[0] << 24) | ((uint32_t) p[1] << 16) ++ | ((uint32_t) p[2] << 8) | (uint32_t) p[3]); ++ else ++ return (((uint32_t) p[3] << 24) | ((uint32_t) p[2] << 16) ++ | ((uint32_t) p[1] << 8) | (uint32_t) p[0]); ++} ++ ++/* Read a uint64 from BUF and advance 8 bytes. 
*/ ++ ++static uint64_t ++read_uint64 (struct dwarf_buf *buf) ++{ ++ const unsigned char *p = buf->buf; ++ ++ if (!advance (buf, 8)) ++ return 0; ++ if (buf->is_bigendian) ++ return (((uint64_t) p[0] << 56) | ((uint64_t) p[1] << 48) ++ | ((uint64_t) p[2] << 40) | ((uint64_t) p[3] << 32) ++ | ((uint64_t) p[4] << 24) | ((uint64_t) p[5] << 16) ++ | ((uint64_t) p[6] << 8) | (uint64_t) p[7]); ++ else ++ return (((uint64_t) p[7] << 56) | ((uint64_t) p[6] << 48) ++ | ((uint64_t) p[5] << 40) | ((uint64_t) p[4] << 32) ++ | ((uint64_t) p[3] << 24) | ((uint64_t) p[2] << 16) ++ | ((uint64_t) p[1] << 8) | (uint64_t) p[0]); ++} ++ ++/* Read an offset from BUF and advance the appropriate number of ++ bytes. */ ++ ++static uint64_t ++read_offset (struct dwarf_buf *buf, int is_dwarf64) ++{ ++ if (is_dwarf64) ++ return read_uint64 (buf); ++ else ++ return read_uint32 (buf); ++} ++ ++/* Read an address from BUF and advance the appropriate number of ++ bytes. */ ++ ++static uint64_t ++read_address (struct dwarf_buf *buf, int addrsize) ++{ ++ switch (addrsize) ++ { ++ case 1: ++ return read_byte (buf); ++ case 2: ++ return read_uint16 (buf); ++ case 4: ++ return read_uint32 (buf); ++ case 8: ++ return read_uint64 (buf); ++ default: ++ dwarf_buf_error (buf, "unrecognized address size"); ++ return 0; ++ } ++} ++ ++/* Return whether a value is the highest possible address, given the ++ address size. */ ++ ++static int ++is_highest_address (uint64_t address, int addrsize) ++{ ++ switch (addrsize) ++ { ++ case 1: ++ return address == (unsigned char) -1; ++ case 2: ++ return address == (uint16_t) -1; ++ case 4: ++ return address == (uint32_t) -1; ++ case 8: ++ return address == (uint64_t) -1; ++ default: ++ return 0; ++ } ++} ++ ++/* Read an unsigned LEB128 number. 
*/ ++ ++static uint64_t ++read_uleb128 (struct dwarf_buf *buf) ++{ ++ uint64_t ret; ++ unsigned int shift; ++ int overflow; ++ unsigned char b; ++ ++ ret = 0; ++ shift = 0; ++ overflow = 0; ++ do ++ { ++ const unsigned char *p; ++ ++ p = buf->buf; ++ if (!advance (buf, 1)) ++ return 0; ++ b = *p; ++ if (shift < 64) ++ ret |= ((uint64_t) (b & 0x7f)) << shift; ++ else if (!overflow) ++ { ++ dwarf_buf_error (buf, "LEB128 overflows uint64_t"); ++ overflow = 1; ++ } ++ shift += 7; ++ } ++ while ((b & 0x80) != 0); ++ ++ return ret; ++} ++ ++/* Read a signed LEB128 number. */ ++ ++static int64_t ++read_sleb128 (struct dwarf_buf *buf) ++{ ++ uint64_t val; ++ unsigned int shift; ++ int overflow; ++ unsigned char b; ++ ++ val = 0; ++ shift = 0; ++ overflow = 0; ++ do ++ { ++ const unsigned char *p; ++ ++ p = buf->buf; ++ if (!advance (buf, 1)) ++ return 0; ++ b = *p; ++ if (shift < 64) ++ val |= ((uint64_t) (b & 0x7f)) << shift; ++ else if (!overflow) ++ { ++ dwarf_buf_error (buf, "signed LEB128 overflows uint64_t"); ++ overflow = 1; ++ } ++ shift += 7; ++ } ++ while ((b & 0x80) != 0); ++ ++ if ((b & 0x40) != 0 && shift < 64) ++ val |= ((uint64_t) -1) << shift; ++ ++ return (int64_t) val; ++} ++ ++/* Return the length of an LEB128 number. */ ++ ++static size_t ++leb128_len (const unsigned char *p) ++{ ++ size_t ret; ++ ++ ret = 1; ++ while ((*p & 0x80) != 0) ++ { ++ ++p; ++ ++ret; ++ } ++ return ret; ++} ++ ++/* Free an abbreviations structure. 
*/ ++ ++static void ++free_abbrevs (struct backtrace_state *state, struct abbrevs *abbrevs, ++ backtrace_error_callback error_callback, void *data) ++{ ++ size_t i; ++ ++ for (i = 0; i < abbrevs->num_abbrevs; ++i) ++ backtrace_free (state, abbrevs->abbrevs[i].attrs, ++ abbrevs->abbrevs[i].num_attrs * sizeof (struct attr), ++ error_callback, data); ++ backtrace_free (state, abbrevs->abbrevs, ++ abbrevs->num_abbrevs * sizeof (struct abbrev), ++ error_callback, data); ++ abbrevs->num_abbrevs = 0; ++ abbrevs->abbrevs = NULL; ++} ++ ++/* Read an attribute value. Returns 1 on success, 0 on failure. If ++ the value can be represented as a uint64_t, sets *VAL and sets ++ *IS_VALID to 1. We don't try to store the value of other attribute ++ forms, because we don't care about them. */ ++ ++static int ++read_attribute (enum dwarf_form form, struct dwarf_buf *buf, ++ int is_dwarf64, int version, int addrsize, ++ const unsigned char *dwarf_str, size_t dwarf_str_size, ++ struct attr_val *val) ++{ ++ /* Avoid warnings about val.u.FIELD may be used uninitialized if ++ this function is inlined. The warnings aren't valid but can ++ occur because the different fields are set and used ++ conditionally. 
*/ ++ memset (val, 0, sizeof *val); ++ ++ switch (form) ++ { ++ case DW_FORM_addr: ++ val->encoding = ATTR_VAL_ADDRESS; ++ val->u.uint = read_address (buf, addrsize); ++ return 1; ++ case DW_FORM_block2: ++ val->encoding = ATTR_VAL_BLOCK; ++ return advance (buf, read_uint16 (buf)); ++ case DW_FORM_block4: ++ val->encoding = ATTR_VAL_BLOCK; ++ return advance (buf, read_uint32 (buf)); ++ case DW_FORM_data2: ++ val->encoding = ATTR_VAL_UINT; ++ val->u.uint = read_uint16 (buf); ++ return 1; ++ case DW_FORM_data4: ++ val->encoding = ATTR_VAL_UINT; ++ val->u.uint = read_uint32 (buf); ++ return 1; ++ case DW_FORM_data8: ++ val->encoding = ATTR_VAL_UINT; ++ val->u.uint = read_uint64 (buf); ++ return 1; ++ case DW_FORM_string: ++ val->encoding = ATTR_VAL_STRING; ++ val->u.string = (const char *) buf->buf; ++ return advance (buf, strnlen ((const char *) buf->buf, buf->left) + 1); ++ case DW_FORM_block: ++ val->encoding = ATTR_VAL_BLOCK; ++ return advance (buf, read_uleb128 (buf)); ++ case DW_FORM_block1: ++ val->encoding = ATTR_VAL_BLOCK; ++ return advance (buf, read_byte (buf)); ++ case DW_FORM_data1: ++ val->encoding = ATTR_VAL_UINT; ++ val->u.uint = read_byte (buf); ++ return 1; ++ case DW_FORM_flag: ++ val->encoding = ATTR_VAL_UINT; ++ val->u.uint = read_byte (buf); ++ return 1; ++ case DW_FORM_sdata: ++ val->encoding = ATTR_VAL_SINT; ++ val->u.sint = read_sleb128 (buf); ++ return 1; ++ case DW_FORM_strp: ++ { ++ uint64_t offset; ++ ++ offset = read_offset (buf, is_dwarf64); ++ if (offset >= dwarf_str_size) ++ { ++ dwarf_buf_error (buf, "DW_FORM_strp out of range"); ++ return 0; ++ } ++ val->encoding = ATTR_VAL_STRING; ++ val->u.string = (const char *) dwarf_str + offset; ++ return 1; ++ } ++ case DW_FORM_udata: ++ val->encoding = ATTR_VAL_UINT; ++ val->u.uint = read_uleb128 (buf); ++ return 1; ++ case DW_FORM_ref_addr: ++ val->encoding = ATTR_VAL_REF_INFO; ++ if (version == 2) ++ val->u.uint = read_address (buf, addrsize); ++ else ++ val->u.uint = read_offset (buf, 
is_dwarf64); ++ return 1; ++ case DW_FORM_ref1: ++ val->encoding = ATTR_VAL_REF_UNIT; ++ val->u.uint = read_byte (buf); ++ return 1; ++ case DW_FORM_ref2: ++ val->encoding = ATTR_VAL_REF_UNIT; ++ val->u.uint = read_uint16 (buf); ++ return 1; ++ case DW_FORM_ref4: ++ val->encoding = ATTR_VAL_REF_UNIT; ++ val->u.uint = read_uint32 (buf); ++ return 1; ++ case DW_FORM_ref8: ++ val->encoding = ATTR_VAL_REF_UNIT; ++ val->u.uint = read_uint64 (buf); ++ return 1; ++ case DW_FORM_ref_udata: ++ val->encoding = ATTR_VAL_REF_UNIT; ++ val->u.uint = read_uleb128 (buf); ++ return 1; ++ case DW_FORM_indirect: ++ { ++ uint64_t form; ++ ++ form = read_uleb128 (buf); ++ return read_attribute ((enum dwarf_form) form, buf, is_dwarf64, ++ version, addrsize, dwarf_str, dwarf_str_size, ++ val); ++ } ++ case DW_FORM_sec_offset: ++ val->encoding = ATTR_VAL_REF_SECTION; ++ val->u.uint = read_offset (buf, is_dwarf64); ++ return 1; ++ case DW_FORM_exprloc: ++ val->encoding = ATTR_VAL_EXPR; ++ return advance (buf, read_uleb128 (buf)); ++ case DW_FORM_flag_present: ++ val->encoding = ATTR_VAL_UINT; ++ val->u.uint = 1; ++ return 1; ++ case DW_FORM_ref_sig8: ++ val->encoding = ATTR_VAL_REF_TYPE; ++ val->u.uint = read_uint64 (buf); ++ return 1; ++ case DW_FORM_GNU_addr_index: ++ val->encoding = ATTR_VAL_REF_SECTION; ++ val->u.uint = read_uleb128 (buf); ++ return 1; ++ case DW_FORM_GNU_str_index: ++ val->encoding = ATTR_VAL_REF_SECTION; ++ val->u.uint = read_uleb128 (buf); ++ return 1; ++ case DW_FORM_GNU_ref_alt: ++ val->encoding = ATTR_VAL_REF_SECTION; ++ val->u.uint = read_offset (buf, is_dwarf64); ++ return 1; ++ case DW_FORM_GNU_strp_alt: ++ val->encoding = ATTR_VAL_REF_SECTION; ++ val->u.uint = read_offset (buf, is_dwarf64); ++ return 1; ++ default: ++ dwarf_buf_error (buf, "unrecognized DWARF form"); ++ return 0; ++ } ++} ++ ++/* Compare function_addrs for qsort. When ranges are nested, make the ++ smallest one sort last. 
*/ ++ ++static int ++function_addrs_compare (const void *v1, const void *v2) ++{ ++ const struct function_addrs *a1 = (const struct function_addrs *) v1; ++ const struct function_addrs *a2 = (const struct function_addrs *) v2; ++ ++ if (a1->low < a2->low) ++ return -1; ++ if (a1->low > a2->low) ++ return 1; ++ if (a1->high < a2->high) ++ return 1; ++ if (a1->high > a2->high) ++ return -1; ++ return strcmp (a1->function->name, a2->function->name); ++} ++ ++/* Compare a PC against a function_addrs for bsearch. Note that if ++ there are multiple ranges containing PC, which one will be returned ++ is unpredictable. We compensate for that in dwarf_fileline. */ ++ ++static int ++function_addrs_search (const void *vkey, const void *ventry) ++{ ++ const uintptr_t *key = (const uintptr_t *) vkey; ++ const struct function_addrs *entry = (const struct function_addrs *) ventry; ++ uintptr_t pc; ++ ++ pc = *key; ++ if (pc < entry->low) ++ return -1; ++ else if (pc >= entry->high) ++ return 1; ++ else ++ return 0; ++} ++ ++/* Add a new compilation unit address range to a vector. Returns 1 on ++ success, 0 on failure. */ ++ ++static int ++add_unit_addr (struct backtrace_state *state, uintptr_t base_address, ++ struct unit_addrs addrs, ++ backtrace_error_callback error_callback, void *data, ++ struct unit_addrs_vector *vec) ++{ ++ struct unit_addrs *p; ++ ++ /* Add in the base address of the module here, so that we can look ++ up the PC directly. */ ++ addrs.low += base_address; ++ addrs.high += base_address; ++ ++ /* Try to merge with the last entry. 
*/ ++ if (vec->count > 0) ++ { ++ p = (struct unit_addrs *) vec->vec.base + (vec->count - 1); ++ if ((addrs.low == p->high || addrs.low == p->high + 1) ++ && addrs.u == p->u) ++ { ++ if (addrs.high > p->high) ++ p->high = addrs.high; ++ return 1; ++ } ++ } ++ ++ p = ((struct unit_addrs *) ++ backtrace_vector_grow (state, sizeof (struct unit_addrs), ++ error_callback, data, &vec->vec)); ++ if (p == NULL) ++ return 0; ++ ++ *p = addrs; ++ ++vec->count; ++ return 1; ++} ++ ++/* Free a unit address vector. */ ++ ++static void ++free_unit_addrs_vector (struct backtrace_state *state, ++ struct unit_addrs_vector *vec, ++ backtrace_error_callback error_callback, void *data) ++{ ++ struct unit_addrs *addrs; ++ size_t i; ++ ++ addrs = (struct unit_addrs *) vec->vec.base; ++ for (i = 0; i < vec->count; ++i) ++ free_abbrevs (state, &addrs[i].u->abbrevs, error_callback, data); ++} ++ ++/* Compare unit_addrs for qsort. When ranges are nested, make the ++ smallest one sort last. */ ++ ++static int ++unit_addrs_compare (const void *v1, const void *v2) ++{ ++ const struct unit_addrs *a1 = (const struct unit_addrs *) v1; ++ const struct unit_addrs *a2 = (const struct unit_addrs *) v2; ++ ++ if (a1->low < a2->low) ++ return -1; ++ if (a1->low > a2->low) ++ return 1; ++ if (a1->high < a2->high) ++ return 1; ++ if (a1->high > a2->high) ++ return -1; ++ if (a1->u->lineoff < a2->u->lineoff) ++ return -1; ++ if (a1->u->lineoff > a2->u->lineoff) ++ return 1; ++ return 0; ++} ++ ++/* Compare a PC against a unit_addrs for bsearch. Note that if there ++ are multiple ranges containing PC, which one will be returned is ++ unpredictable. We compensate for that in dwarf_fileline. 
*/ ++ ++static int ++unit_addrs_search (const void *vkey, const void *ventry) ++{ ++ const uintptr_t *key = (const uintptr_t *) vkey; ++ const struct unit_addrs *entry = (const struct unit_addrs *) ventry; ++ uintptr_t pc; ++ ++ pc = *key; ++ if (pc < entry->low) ++ return -1; ++ else if (pc >= entry->high) ++ return 1; ++ else ++ return 0; ++} ++ ++/* Sort the line vector by PC. We want a stable sort here to maintain ++ the order of lines for the same PC values. Since the sequence is ++ being sorted in place, their addresses cannot be relied on to ++ maintain stability. That is the purpose of the index member. */ ++ ++static int ++line_compare (const void *v1, const void *v2) ++{ ++ const struct line *ln1 = (const struct line *) v1; ++ const struct line *ln2 = (const struct line *) v2; ++ ++ if (ln1->pc < ln2->pc) ++ return -1; ++ else if (ln1->pc > ln2->pc) ++ return 1; ++ else if (ln1->idx < ln2->idx) ++ return -1; ++ else if (ln1->idx > ln2->idx) ++ return 1; ++ else ++ return 0; ++} ++ ++/* Find a PC in a line vector. We always allocate an extra entry at ++ the end of the lines vector, so that this routine can safely look ++ at the next entry. Note that when there are multiple mappings for ++ the same PC value, this will return the last one. */ ++ ++static int ++line_search (const void *vkey, const void *ventry) ++{ ++ const uintptr_t *key = (const uintptr_t *) vkey; ++ const struct line *entry = (const struct line *) ventry; ++ uintptr_t pc; ++ ++ pc = *key; ++ if (pc < entry->pc) ++ return -1; ++ else if (pc >= (entry + 1)->pc) ++ return 1; ++ else ++ return 0; ++} ++ ++/* Sort the abbrevs by the abbrev code. This function is passed to ++ both qsort and bsearch. 
*/ ++ ++static int ++abbrev_compare (const void *v1, const void *v2) ++{ ++ const struct abbrev *a1 = (const struct abbrev *) v1; ++ const struct abbrev *a2 = (const struct abbrev *) v2; ++ ++ if (a1->code < a2->code) ++ return -1; ++ else if (a1->code > a2->code) ++ return 1; ++ else ++ { ++ /* This really shouldn't happen. It means there are two ++ different abbrevs with the same code, and that means we don't ++ know which one lookup_abbrev should return. */ ++ return 0; ++ } ++} ++ ++/* Read the abbreviation table for a compilation unit. Returns 1 on ++ success, 0 on failure. */ ++ ++static int ++read_abbrevs (struct backtrace_state *state, uint64_t abbrev_offset, ++ const unsigned char *dwarf_abbrev, size_t dwarf_abbrev_size, ++ int is_bigendian, backtrace_error_callback error_callback, ++ void *data, struct abbrevs *abbrevs) ++{ ++ struct dwarf_buf abbrev_buf; ++ struct dwarf_buf count_buf; ++ size_t num_abbrevs; ++ ++ abbrevs->num_abbrevs = 0; ++ abbrevs->abbrevs = NULL; ++ ++ if (abbrev_offset >= dwarf_abbrev_size) ++ { ++ error_callback (data, "abbrev offset out of range", 0); ++ return 0; ++ } ++ ++ abbrev_buf.name = ".debug_abbrev"; ++ abbrev_buf.start = dwarf_abbrev; ++ abbrev_buf.buf = dwarf_abbrev + abbrev_offset; ++ abbrev_buf.left = dwarf_abbrev_size - abbrev_offset; ++ abbrev_buf.is_bigendian = is_bigendian; ++ abbrev_buf.error_callback = error_callback; ++ abbrev_buf.data = data; ++ abbrev_buf.reported_underflow = 0; ++ ++ /* Count the number of abbrevs in this list. */ ++ ++ count_buf = abbrev_buf; ++ num_abbrevs = 0; ++ while (read_uleb128 (&count_buf) != 0) ++ { ++ if (count_buf.reported_underflow) ++ return 0; ++ ++num_abbrevs; ++ // Skip tag. ++ read_uleb128 (&count_buf); ++ // Skip has_children. ++ read_byte (&count_buf); ++ // Skip attributes. ++ while (read_uleb128 (&count_buf) != 0) ++ read_uleb128 (&count_buf); ++ // Skip form of last attribute. 
++ read_uleb128 (&count_buf); ++ } ++ ++ if (count_buf.reported_underflow) ++ return 0; ++ ++ if (num_abbrevs == 0) ++ return 1; ++ ++ abbrevs->num_abbrevs = num_abbrevs; ++ abbrevs->abbrevs = ((struct abbrev *) ++ backtrace_alloc (state, ++ num_abbrevs * sizeof (struct abbrev), ++ error_callback, data)); ++ if (abbrevs->abbrevs == NULL) ++ return 0; ++ memset (abbrevs->abbrevs, 0, num_abbrevs * sizeof (struct abbrev)); ++ ++ num_abbrevs = 0; ++ while (1) ++ { ++ uint64_t code; ++ struct abbrev a; ++ size_t num_attrs; ++ struct attr *attrs; ++ ++ if (abbrev_buf.reported_underflow) ++ goto fail; ++ ++ code = read_uleb128 (&abbrev_buf); ++ if (code == 0) ++ break; ++ ++ a.code = code; ++ a.tag = (enum dwarf_tag) read_uleb128 (&abbrev_buf); ++ a.has_children = read_byte (&abbrev_buf); ++ ++ count_buf = abbrev_buf; ++ num_attrs = 0; ++ while (read_uleb128 (&count_buf) != 0) ++ { ++ ++num_attrs; ++ read_uleb128 (&count_buf); ++ } ++ ++ if (num_attrs == 0) ++ { ++ attrs = NULL; ++ read_uleb128 (&abbrev_buf); ++ read_uleb128 (&abbrev_buf); ++ } ++ else ++ { ++ attrs = ((struct attr *) ++ backtrace_alloc (state, num_attrs * sizeof *attrs, ++ error_callback, data)); ++ if (attrs == NULL) ++ goto fail; ++ num_attrs = 0; ++ while (1) ++ { ++ uint64_t name; ++ uint64_t form; ++ ++ name = read_uleb128 (&abbrev_buf); ++ form = read_uleb128 (&abbrev_buf); ++ if (name == 0) ++ break; ++ attrs[num_attrs].name = (enum dwarf_attribute) name; ++ attrs[num_attrs].form = (enum dwarf_form) form; ++ ++num_attrs; ++ } ++ } ++ ++ a.num_attrs = num_attrs; ++ a.attrs = attrs; ++ ++ abbrevs->abbrevs[num_abbrevs] = a; ++ ++num_abbrevs; ++ } ++ ++ backtrace_qsort (abbrevs->abbrevs, abbrevs->num_abbrevs, ++ sizeof (struct abbrev), abbrev_compare); ++ ++ return 1; ++ ++ fail: ++ free_abbrevs (state, abbrevs, error_callback, data); ++ return 0; ++} ++ ++/* Return the abbrev information for an abbrev code. 
*/ ++ ++static const struct abbrev * ++lookup_abbrev (struct abbrevs *abbrevs, uint64_t code, ++ backtrace_error_callback error_callback, void *data) ++{ ++ struct abbrev key; ++ void *p; ++ ++ /* With GCC, where abbrevs are simply numbered in order, we should ++ be able to just look up the entry. */ ++ if (code - 1 < abbrevs->num_abbrevs ++ && abbrevs->abbrevs[code - 1].code == code) ++ return &abbrevs->abbrevs[code - 1]; ++ ++ /* Otherwise we have to search. */ ++ memset (&key, 0, sizeof key); ++ key.code = code; ++ p = bsearch (&key, abbrevs->abbrevs, abbrevs->num_abbrevs, ++ sizeof (struct abbrev), abbrev_compare); ++ if (p == NULL) ++ { ++ error_callback (data, "invalid abbreviation code", 0); ++ return NULL; ++ } ++ return (const struct abbrev *) p; ++} ++ ++/* Add non-contiguous address ranges for a compilation unit. Returns ++ 1 on success, 0 on failure. */ ++ ++static int ++add_unit_ranges (struct backtrace_state *state, uintptr_t base_address, ++ struct unit *u, uint64_t ranges, uint64_t base, ++ int is_bigendian, const unsigned char *dwarf_ranges, ++ size_t dwarf_ranges_size, ++ backtrace_error_callback error_callback, void *data, ++ struct unit_addrs_vector *addrs) ++{ ++ struct dwarf_buf ranges_buf; ++ ++ if (ranges >= dwarf_ranges_size) ++ { ++ error_callback (data, "ranges offset out of range", 0); ++ return 0; ++ } ++ ++ ranges_buf.name = ".debug_ranges"; ++ ranges_buf.start = dwarf_ranges; ++ ranges_buf.buf = dwarf_ranges + ranges; ++ ranges_buf.left = dwarf_ranges_size - ranges; ++ ranges_buf.is_bigendian = is_bigendian; ++ ranges_buf.error_callback = error_callback; ++ ranges_buf.data = data; ++ ranges_buf.reported_underflow = 0; ++ ++ while (1) ++ { ++ uint64_t low; ++ uint64_t high; ++ ++ if (ranges_buf.reported_underflow) ++ return 0; ++ ++ low = read_address (&ranges_buf, u->addrsize); ++ high = read_address (&ranges_buf, u->addrsize); ++ ++ if (low == 0 && high == 0) ++ break; ++ ++ if (is_highest_address (low, u->addrsize)) ++ base = high; 
++ else ++ { ++ struct unit_addrs a; ++ ++ a.low = low + base; ++ a.high = high + base; ++ a.u = u; ++ if (!add_unit_addr (state, base_address, a, error_callback, data, ++ addrs)) ++ return 0; ++ } ++ } ++ ++ if (ranges_buf.reported_underflow) ++ return 0; ++ ++ return 1; ++} ++ ++/* Find the address range covered by a compilation unit, reading from ++ UNIT_BUF and adding values to U. Returns 1 if all data could be ++ read, 0 if there is some error. */ ++ ++static int ++find_address_ranges (struct backtrace_state *state, uintptr_t base_address, ++ struct dwarf_buf *unit_buf, ++ const unsigned char *dwarf_str, size_t dwarf_str_size, ++ const unsigned char *dwarf_ranges, ++ size_t dwarf_ranges_size, ++ int is_bigendian, backtrace_error_callback error_callback, ++ void *data, struct unit *u, ++ struct unit_addrs_vector *addrs) ++{ ++ while (unit_buf->left > 0) ++ { ++ uint64_t code; ++ const struct abbrev *abbrev; ++ uint64_t lowpc; ++ int have_lowpc; ++ uint64_t highpc; ++ int have_highpc; ++ int highpc_is_relative; ++ uint64_t ranges; ++ int have_ranges; ++ size_t i; ++ ++ code = read_uleb128 (unit_buf); ++ if (code == 0) ++ return 1; ++ ++ abbrev = lookup_abbrev (&u->abbrevs, code, error_callback, data); ++ if (abbrev == NULL) ++ return 0; ++ ++ lowpc = 0; ++ have_lowpc = 0; ++ highpc = 0; ++ have_highpc = 0; ++ highpc_is_relative = 0; ++ ranges = 0; ++ have_ranges = 0; ++ for (i = 0; i < abbrev->num_attrs; ++i) ++ { ++ struct attr_val val; ++ ++ if (!read_attribute (abbrev->attrs[i].form, unit_buf, ++ u->is_dwarf64, u->version, u->addrsize, ++ dwarf_str, dwarf_str_size, &val)) ++ return 0; ++ ++ switch (abbrev->attrs[i].name) ++ { ++ case DW_AT_low_pc: ++ if (val.encoding == ATTR_VAL_ADDRESS) ++ { ++ lowpc = val.u.uint; ++ have_lowpc = 1; ++ } ++ break; ++ ++ case DW_AT_high_pc: ++ if (val.encoding == ATTR_VAL_ADDRESS) ++ { ++ highpc = val.u.uint; ++ have_highpc = 1; ++ } ++ else if (val.encoding == ATTR_VAL_UINT) ++ { ++ highpc = val.u.uint; ++ have_highpc = 1; 
++ highpc_is_relative = 1; ++ } ++ break; ++ ++ case DW_AT_ranges: ++ if (val.encoding == ATTR_VAL_UINT ++ || val.encoding == ATTR_VAL_REF_SECTION) ++ { ++ ranges = val.u.uint; ++ have_ranges = 1; ++ } ++ break; ++ ++ case DW_AT_stmt_list: ++ if (abbrev->tag == DW_TAG_compile_unit ++ && (val.encoding == ATTR_VAL_UINT ++ || val.encoding == ATTR_VAL_REF_SECTION)) ++ u->lineoff = val.u.uint; ++ break; ++ ++ case DW_AT_name: ++ if (abbrev->tag == DW_TAG_compile_unit ++ && val.encoding == ATTR_VAL_STRING) ++ u->filename = val.u.string; ++ break; ++ ++ case DW_AT_comp_dir: ++ if (abbrev->tag == DW_TAG_compile_unit ++ && val.encoding == ATTR_VAL_STRING) ++ u->comp_dir = val.u.string; ++ break; ++ ++ default: ++ break; ++ } ++ } ++ ++ if (abbrev->tag == DW_TAG_compile_unit ++ || abbrev->tag == DW_TAG_subprogram) ++ { ++ if (have_ranges) ++ { ++ if (!add_unit_ranges (state, base_address, u, ranges, lowpc, ++ is_bigendian, dwarf_ranges, ++ dwarf_ranges_size, error_callback, ++ data, addrs)) ++ return 0; ++ } ++ else if (have_lowpc && have_highpc) ++ { ++ struct unit_addrs a; ++ ++ if (highpc_is_relative) ++ highpc += lowpc; ++ a.low = lowpc; ++ a.high = highpc; ++ a.u = u; ++ ++ if (!add_unit_addr (state, base_address, a, error_callback, data, ++ addrs)) ++ return 0; ++ } ++ ++ /* If we found the PC range in the DW_TAG_compile_unit, we ++ can stop now. */ ++ if (abbrev->tag == DW_TAG_compile_unit ++ && (have_ranges || (have_lowpc && have_highpc))) ++ return 1; ++ } ++ ++ if (abbrev->has_children) ++ { ++ if (!find_address_ranges (state, base_address, unit_buf, ++ dwarf_str, dwarf_str_size, ++ dwarf_ranges, dwarf_ranges_size, ++ is_bigendian, error_callback, data, ++ u, addrs)) ++ return 0; ++ } ++ } ++ ++ return 1; ++} ++ ++/* Build a mapping from address ranges to the compilation units where ++ the line number information for that range can be found. Returns 1 ++ on success, 0 on failure. 
*/ ++ ++static int ++build_address_map (struct backtrace_state *state, uintptr_t base_address, ++ const unsigned char *dwarf_info, size_t dwarf_info_size, ++ const unsigned char *dwarf_abbrev, size_t dwarf_abbrev_size, ++ const unsigned char *dwarf_ranges, size_t dwarf_ranges_size, ++ const unsigned char *dwarf_str, size_t dwarf_str_size, ++ int is_bigendian, backtrace_error_callback error_callback, ++ void *data, struct unit_addrs_vector *addrs) ++{ ++ struct dwarf_buf info; ++ struct abbrevs abbrevs; ++ ++ memset (&addrs->vec, 0, sizeof addrs->vec); ++ addrs->count = 0; ++ ++ /* Read through the .debug_info section. FIXME: Should we use the ++ .debug_aranges section? gdb and addr2line don't use it, but I'm ++ not sure why. */ ++ ++ info.name = ".debug_info"; ++ info.start = dwarf_info; ++ info.buf = dwarf_info; ++ info.left = dwarf_info_size; ++ info.is_bigendian = is_bigendian; ++ info.error_callback = error_callback; ++ info.data = data; ++ info.reported_underflow = 0; ++ ++ memset (&abbrevs, 0, sizeof abbrevs); ++ while (info.left > 0) ++ { ++ const unsigned char *unit_data_start; ++ uint64_t len; ++ int is_dwarf64; ++ struct dwarf_buf unit_buf; ++ int version; ++ uint64_t abbrev_offset; ++ int addrsize; ++ struct unit *u; ++ ++ if (info.reported_underflow) ++ goto fail; ++ ++ unit_data_start = info.buf; ++ ++ is_dwarf64 = 0; ++ len = read_uint32 (&info); ++ if (len == 0xffffffff) ++ { ++ len = read_uint64 (&info); ++ is_dwarf64 = 1; ++ } ++ ++ unit_buf = info; ++ unit_buf.left = len; ++ ++ if (!advance (&info, len)) ++ goto fail; ++ ++ version = read_uint16 (&unit_buf); ++ if (version < 2 || version > 4) ++ { ++ dwarf_buf_error (&unit_buf, "unrecognized DWARF version"); ++ goto fail; ++ } ++ ++ abbrev_offset = read_offset (&unit_buf, is_dwarf64); ++ if (!read_abbrevs (state, abbrev_offset, dwarf_abbrev, dwarf_abbrev_size, ++ is_bigendian, error_callback, data, &abbrevs)) ++ goto fail; ++ ++ addrsize = read_byte (&unit_buf); ++ ++ u = ((struct unit *) ++ 
backtrace_alloc (state, sizeof *u, error_callback, data)); ++ if (u == NULL) ++ goto fail; ++ u->unit_data = unit_buf.buf; ++ u->unit_data_len = unit_buf.left; ++ u->unit_data_offset = unit_buf.buf - unit_data_start; ++ u->version = version; ++ u->is_dwarf64 = is_dwarf64; ++ u->addrsize = addrsize; ++ u->filename = NULL; ++ u->comp_dir = NULL; ++ u->abs_filename = NULL; ++ u->lineoff = 0; ++ u->abbrevs = abbrevs; ++ memset (&abbrevs, 0, sizeof abbrevs); ++ ++ /* The actual line number mappings will be read as needed. */ ++ u->lines = NULL; ++ u->lines_count = 0; ++ u->function_addrs = NULL; ++ u->function_addrs_count = 0; ++ ++ if (!find_address_ranges (state, base_address, &unit_buf, ++ dwarf_str, dwarf_str_size, ++ dwarf_ranges, dwarf_ranges_size, ++ is_bigendian, error_callback, data, ++ u, addrs)) ++ { ++ free_abbrevs (state, &u->abbrevs, error_callback, data); ++ backtrace_free (state, u, sizeof *u, error_callback, data); ++ goto fail; ++ } ++ ++ if (unit_buf.reported_underflow) ++ { ++ free_abbrevs (state, &u->abbrevs, error_callback, data); ++ backtrace_free (state, u, sizeof *u, error_callback, data); ++ goto fail; ++ } ++ } ++ if (info.reported_underflow) ++ goto fail; ++ ++ return 1; ++ ++ fail: ++ free_abbrevs (state, &abbrevs, error_callback, data); ++ free_unit_addrs_vector (state, addrs, error_callback, data); ++ return 0; ++} ++ ++/* Add a new mapping to the vector of line mappings that we are ++ building. Returns 1 on success, 0 on failure. */ ++ ++static int ++add_line (struct backtrace_state *state, struct dwarf_data *ddata, ++ uintptr_t pc, const char *filename, int lineno, ++ backtrace_error_callback error_callback, void *data, ++ struct line_vector *vec) ++{ ++ struct line *ln; ++ ++ /* If we are adding the same mapping, ignore it. This can happen ++ when using discriminators. 
*/ ++ if (vec->count > 0) ++ { ++ ln = (struct line *) vec->vec.base + (vec->count - 1); ++ if (pc == ln->pc && filename == ln->filename && lineno == ln->lineno) ++ return 1; ++ } ++ ++ ln = ((struct line *) ++ backtrace_vector_grow (state, sizeof (struct line), error_callback, ++ data, &vec->vec)); ++ if (ln == NULL) ++ return 0; ++ ++ /* Add in the base address here, so that we can look up the PC ++ directly. */ ++ ln->pc = pc + ddata->base_address; ++ ++ ln->filename = filename; ++ ln->lineno = lineno; ++ ln->idx = vec->count; ++ ++ ++vec->count; ++ ++ return 1; ++} ++ ++/* Free the line header information. If FREE_FILENAMES is true we ++ free the file names themselves, otherwise we leave them, as there ++ may be line structures pointing to them. */ ++ ++static void ++free_line_header (struct backtrace_state *state, struct line_header *hdr, ++ backtrace_error_callback error_callback, void *data) ++{ ++ backtrace_free (state, hdr->dirs, hdr->dirs_count * sizeof (const char *), ++ error_callback, data); ++ backtrace_free (state, hdr->filenames, ++ hdr->filenames_count * sizeof (char *), ++ error_callback, data); ++} ++ ++/* Read the line header. Return 1 on success, 0 on failure. 
*/ ++ ++static int ++read_line_header (struct backtrace_state *state, struct unit *u, ++ int is_dwarf64, struct dwarf_buf *line_buf, ++ struct line_header *hdr) ++{ ++ uint64_t hdrlen; ++ struct dwarf_buf hdr_buf; ++ const unsigned char *p; ++ const unsigned char *pend; ++ size_t i; ++ ++ hdr->version = read_uint16 (line_buf); ++ if (hdr->version < 2 || hdr->version > 4) ++ { ++ dwarf_buf_error (line_buf, "unsupported line number version"); ++ return 0; ++ } ++ ++ hdrlen = read_offset (line_buf, is_dwarf64); ++ ++ hdr_buf = *line_buf; ++ hdr_buf.left = hdrlen; ++ ++ if (!advance (line_buf, hdrlen)) ++ return 0; ++ ++ hdr->min_insn_len = read_byte (&hdr_buf); ++ if (hdr->version < 4) ++ hdr->max_ops_per_insn = 1; ++ else ++ hdr->max_ops_per_insn = read_byte (&hdr_buf); ++ ++ /* We don't care about default_is_stmt. */ ++ read_byte (&hdr_buf); ++ ++ hdr->line_base = read_sbyte (&hdr_buf); ++ hdr->line_range = read_byte (&hdr_buf); ++ ++ hdr->opcode_base = read_byte (&hdr_buf); ++ hdr->opcode_lengths = hdr_buf.buf; ++ if (!advance (&hdr_buf, hdr->opcode_base - 1)) ++ return 0; ++ ++ /* Count the number of directory entries. */ ++ hdr->dirs_count = 0; ++ p = hdr_buf.buf; ++ pend = p + hdr_buf.left; ++ while (p < pend && *p != '\0') ++ { ++ p += strnlen((const char *) p, pend - p) + 1; ++ ++hdr->dirs_count; ++ } ++ ++ hdr->dirs = ((const char **) ++ backtrace_alloc (state, ++ hdr->dirs_count * sizeof (const char *), ++ line_buf->error_callback, line_buf->data)); ++ if (hdr->dirs == NULL) ++ return 0; ++ ++ i = 0; ++ while (*hdr_buf.buf != '\0') ++ { ++ if (hdr_buf.reported_underflow) ++ return 0; ++ ++ hdr->dirs[i] = (const char *) hdr_buf.buf; ++ ++i; ++ if (!advance (&hdr_buf, ++ strnlen ((const char *) hdr_buf.buf, hdr_buf.left) + 1)) ++ return 0; ++ } ++ if (!advance (&hdr_buf, 1)) ++ return 0; ++ ++ /* Count the number of file entries. 
*/ ++ hdr->filenames_count = 0; ++ p = hdr_buf.buf; ++ pend = p + hdr_buf.left; ++ while (p < pend && *p != '\0') ++ { ++ p += strnlen ((const char *) p, pend - p) + 1; ++ p += leb128_len (p); ++ p += leb128_len (p); ++ p += leb128_len (p); ++ ++hdr->filenames_count; ++ } ++ ++ hdr->filenames = ((const char **) ++ backtrace_alloc (state, ++ hdr->filenames_count * sizeof (char *), ++ line_buf->error_callback, ++ line_buf->data)); ++ if (hdr->filenames == NULL) ++ return 0; ++ i = 0; ++ while (*hdr_buf.buf != '\0') ++ { ++ const char *filename; ++ uint64_t dir_index; ++ ++ if (hdr_buf.reported_underflow) ++ return 0; ++ ++ filename = (const char *) hdr_buf.buf; ++ if (!advance (&hdr_buf, ++ strnlen ((const char *) hdr_buf.buf, hdr_buf.left) + 1)) ++ return 0; ++ dir_index = read_uleb128 (&hdr_buf); ++ if (IS_ABSOLUTE_PATH (filename) ++ || (dir_index == 0 && u->comp_dir == NULL)) ++ hdr->filenames[i] = filename; ++ else ++ { ++ const char *dir; ++ size_t dir_len; ++ size_t filename_len; ++ char *s; ++ ++ if (dir_index == 0) ++ dir = u->comp_dir; ++ else if (dir_index - 1 < hdr->dirs_count) ++ dir = hdr->dirs[dir_index - 1]; ++ else ++ { ++ dwarf_buf_error (line_buf, ++ ("invalid directory index in " ++ "line number program header")); ++ return 0; ++ } ++ dir_len = strlen (dir); ++ filename_len = strlen (filename); ++ s = ((char *) ++ backtrace_alloc (state, dir_len + filename_len + 2, ++ line_buf->error_callback, line_buf->data)); ++ if (s == NULL) ++ return 0; ++ memcpy (s, dir, dir_len); ++ /* FIXME: If we are on a DOS-based file system, and the ++ directory or the file name use backslashes, then we ++ should use a backslash here. */ ++ s[dir_len] = '/'; ++ memcpy (s + dir_len + 1, filename, filename_len + 1); ++ hdr->filenames[i] = s; ++ } ++ ++ /* Ignore the modification time and size. 
*/ ++ read_uleb128 (&hdr_buf); ++ read_uleb128 (&hdr_buf); ++ ++ ++i; ++ } ++ ++ if (hdr_buf.reported_underflow) ++ return 0; ++ ++ return 1; ++} ++ ++/* Read the line program, adding line mappings to VEC. Return 1 on ++ success, 0 on failure. */ ++ ++static int ++read_line_program (struct backtrace_state *state, struct dwarf_data *ddata, ++ struct unit *u, const struct line_header *hdr, ++ struct dwarf_buf *line_buf, struct line_vector *vec) ++{ ++ uint64_t address; ++ unsigned int op_index; ++ const char *reset_filename; ++ const char *filename; ++ int lineno; ++ ++ address = 0; ++ op_index = 0; ++ if (hdr->filenames_count > 0) ++ reset_filename = hdr->filenames[0]; ++ else ++ reset_filename = ""; ++ filename = reset_filename; ++ lineno = 1; ++ while (line_buf->left > 0) ++ { ++ unsigned int op; ++ ++ op = read_byte (line_buf); ++ if (op >= hdr->opcode_base) ++ { ++ unsigned int advance; ++ ++ /* Special opcode. */ ++ op -= hdr->opcode_base; ++ advance = op / hdr->line_range; ++ address += (hdr->min_insn_len * (op_index + advance) ++ / hdr->max_ops_per_insn); ++ op_index = (op_index + advance) % hdr->max_ops_per_insn; ++ lineno += hdr->line_base + (int) (op % hdr->line_range); ++ add_line (state, ddata, address, filename, lineno, ++ line_buf->error_callback, line_buf->data, vec); ++ } ++ else if (op == DW_LNS_extended_op) ++ { ++ uint64_t len; ++ ++ len = read_uleb128 (line_buf); ++ op = read_byte (line_buf); ++ switch (op) ++ { ++ case DW_LNE_end_sequence: ++ /* FIXME: Should we mark the high PC here? It seems ++ that we already have that information from the ++ compilation unit. 
*/ ++ address = 0; ++ op_index = 0; ++ filename = reset_filename; ++ lineno = 1; ++ break; ++ case DW_LNE_set_address: ++ address = read_address (line_buf, u->addrsize); ++ break; ++ case DW_LNE_define_file: ++ { ++ const char *f; ++ unsigned int dir_index; ++ ++ f = (const char *) line_buf->buf; ++ if (!advance (line_buf, strnlen (f, line_buf->left) + 1)) ++ return 0; ++ dir_index = read_uleb128 (line_buf); ++ /* Ignore that time and length. */ ++ read_uleb128 (line_buf); ++ read_uleb128 (line_buf); ++ if (IS_ABSOLUTE_PATH (f)) ++ filename = f; ++ else ++ { ++ const char *dir; ++ size_t dir_len; ++ size_t f_len; ++ char *p; ++ ++ if (dir_index == 0) ++ dir = u->comp_dir; ++ else if (dir_index - 1 < hdr->dirs_count) ++ dir = hdr->dirs[dir_index - 1]; ++ else ++ { ++ dwarf_buf_error (line_buf, ++ ("invalid directory index " ++ "in line number program")); ++ return 0; ++ } ++ dir_len = strlen (dir); ++ f_len = strlen (f); ++ p = ((char *) ++ backtrace_alloc (state, dir_len + f_len + 2, ++ line_buf->error_callback, ++ line_buf->data)); ++ if (p == NULL) ++ return 0; ++ memcpy (p, dir, dir_len); ++ /* FIXME: If we are on a DOS-based file system, ++ and the directory or the file name use ++ backslashes, then we should use a backslash ++ here. */ ++ p[dir_len] = '/'; ++ memcpy (p + dir_len + 1, f, f_len + 1); ++ filename = p; ++ } ++ } ++ break; ++ case DW_LNE_set_discriminator: ++ /* We don't care about discriminators. 
*/ ++ read_uleb128 (line_buf); ++ break; ++ default: ++ if (!advance (line_buf, len - 1)) ++ return 0; ++ break; ++ } ++ } ++ else ++ { ++ switch (op) ++ { ++ case DW_LNS_copy: ++ add_line (state, ddata, address, filename, lineno, ++ line_buf->error_callback, line_buf->data, vec); ++ break; ++ case DW_LNS_advance_pc: ++ { ++ uint64_t advance; ++ ++ advance = read_uleb128 (line_buf); ++ address += (hdr->min_insn_len * (op_index + advance) ++ / hdr->max_ops_per_insn); ++ op_index = (op_index + advance) % hdr->max_ops_per_insn; ++ } ++ break; ++ case DW_LNS_advance_line: ++ lineno += (int) read_sleb128 (line_buf); ++ break; ++ case DW_LNS_set_file: ++ { ++ uint64_t fileno; ++ ++ fileno = read_uleb128 (line_buf); ++ if (fileno == 0) ++ filename = ""; ++ else ++ { ++ if (fileno - 1 >= hdr->filenames_count) ++ { ++ dwarf_buf_error (line_buf, ++ ("invalid file number in " ++ "line number program")); ++ return 0; ++ } ++ filename = hdr->filenames[fileno - 1]; ++ } ++ } ++ break; ++ case DW_LNS_set_column: ++ read_uleb128 (line_buf); ++ break; ++ case DW_LNS_negate_stmt: ++ break; ++ case DW_LNS_set_basic_block: ++ break; ++ case DW_LNS_const_add_pc: ++ { ++ unsigned int advance; ++ ++ op = 255 - hdr->opcode_base; ++ advance = op / hdr->line_range; ++ address += (hdr->min_insn_len * (op_index + advance) ++ / hdr->max_ops_per_insn); ++ op_index = (op_index + advance) % hdr->max_ops_per_insn; ++ } ++ break; ++ case DW_LNS_fixed_advance_pc: ++ address += read_uint16 (line_buf); ++ op_index = 0; ++ break; ++ case DW_LNS_set_prologue_end: ++ break; ++ case DW_LNS_set_epilogue_begin: ++ break; ++ case DW_LNS_set_isa: ++ read_uleb128 (line_buf); ++ break; ++ default: ++ { ++ unsigned int i; ++ ++ for (i = hdr->opcode_lengths[op - 1]; i > 0; --i) ++ read_uleb128 (line_buf); ++ } ++ break; ++ } ++ } ++ } ++ ++ return 1; ++} ++ ++/* Read the line number information for a compilation unit. Returns 1 ++ on success, 0 on failure. 
*/ ++ ++static int ++read_line_info (struct backtrace_state *state, struct dwarf_data *ddata, ++ backtrace_error_callback error_callback, void *data, ++ struct unit *u, struct line_header *hdr, struct line **lines, ++ size_t *lines_count) ++{ ++ struct line_vector vec; ++ struct dwarf_buf line_buf; ++ uint64_t len; ++ int is_dwarf64; ++ struct line *ln; ++ ++ memset (&vec.vec, 0, sizeof vec.vec); ++ vec.count = 0; ++ ++ memset (hdr, 0, sizeof *hdr); ++ ++ if (u->lineoff != (off_t) (size_t) u->lineoff ++ || (size_t) u->lineoff >= ddata->dwarf_line_size) ++ { ++ error_callback (data, "unit line offset out of range", 0); ++ goto fail; ++ } ++ ++ line_buf.name = ".debug_line"; ++ line_buf.start = ddata->dwarf_line; ++ line_buf.buf = ddata->dwarf_line + u->lineoff; ++ line_buf.left = ddata->dwarf_line_size - u->lineoff; ++ line_buf.is_bigendian = ddata->is_bigendian; ++ line_buf.error_callback = error_callback; ++ line_buf.data = data; ++ line_buf.reported_underflow = 0; ++ ++ is_dwarf64 = 0; ++ len = read_uint32 (&line_buf); ++ if (len == 0xffffffff) ++ { ++ len = read_uint64 (&line_buf); ++ is_dwarf64 = 1; ++ } ++ line_buf.left = len; ++ ++ if (!read_line_header (state, u, is_dwarf64, &line_buf, hdr)) ++ goto fail; ++ ++ if (!read_line_program (state, ddata, u, hdr, &line_buf, &vec)) ++ goto fail; ++ ++ if (line_buf.reported_underflow) ++ goto fail; ++ ++ if (vec.count == 0) ++ { ++ /* This is not a failure in the sense of a generating an error, ++ but it is a failure in that sense that we have no useful ++ information. */ ++ goto fail; ++ } ++ ++ /* Allocate one extra entry at the end. 
*/ ++ ln = ((struct line *) ++ backtrace_vector_grow (state, sizeof (struct line), error_callback, ++ data, &vec.vec)); ++ if (ln == NULL) ++ goto fail; ++ ln->pc = (uintptr_t) -1; ++ ln->filename = NULL; ++ ln->lineno = 0; ++ ln->idx = 0; ++ ++ if (!backtrace_vector_release (state, &vec.vec, error_callback, data)) ++ goto fail; ++ ++ ln = (struct line *) vec.vec.base; ++ backtrace_qsort (ln, vec.count, sizeof (struct line), line_compare); ++ ++ *lines = ln; ++ *lines_count = vec.count; ++ ++ return 1; ++ ++ fail: ++ vec.vec.alc += vec.vec.size; ++ vec.vec.size = 0; ++ backtrace_vector_release (state, &vec.vec, error_callback, data); ++ free_line_header (state, hdr, error_callback, data); ++ *lines = (struct line *) (uintptr_t) -1; ++ *lines_count = 0; ++ return 0; ++} ++ ++/* Read the name of a function from a DIE referenced by a ++ DW_AT_abstract_origin or DW_AT_specification tag. OFFSET is within ++ the same compilation unit. */ ++ ++static const char * ++read_referenced_name (struct dwarf_data *ddata, struct unit *u, ++ uint64_t offset, backtrace_error_callback error_callback, ++ void *data) ++{ ++ struct dwarf_buf unit_buf; ++ uint64_t code; ++ const struct abbrev *abbrev; ++ const char *ret; ++ size_t i; ++ ++ /* OFFSET is from the start of the data for this compilation unit. ++ U->unit_data is the data, but it starts U->unit_data_offset bytes ++ from the beginning. 
*/ ++ ++ if (offset < u->unit_data_offset ++ || offset - u->unit_data_offset >= u->unit_data_len) ++ { ++ error_callback (data, ++ "abstract origin or specification out of range", ++ 0); ++ return NULL; ++ } ++ ++ offset -= u->unit_data_offset; ++ ++ unit_buf.name = ".debug_info"; ++ unit_buf.start = ddata->dwarf_info; ++ unit_buf.buf = u->unit_data + offset; ++ unit_buf.left = u->unit_data_len - offset; ++ unit_buf.is_bigendian = ddata->is_bigendian; ++ unit_buf.error_callback = error_callback; ++ unit_buf.data = data; ++ unit_buf.reported_underflow = 0; ++ ++ code = read_uleb128 (&unit_buf); ++ if (code == 0) ++ { ++ dwarf_buf_error (&unit_buf, "invalid abstract origin or specification"); ++ return NULL; ++ } ++ ++ abbrev = lookup_abbrev (&u->abbrevs, code, error_callback, data); ++ if (abbrev == NULL) ++ return NULL; ++ ++ ret = NULL; ++ for (i = 0; i < abbrev->num_attrs; ++i) ++ { ++ struct attr_val val; ++ ++ if (!read_attribute (abbrev->attrs[i].form, &unit_buf, ++ u->is_dwarf64, u->version, u->addrsize, ++ ddata->dwarf_str, ddata->dwarf_str_size, ++ &val)) ++ return NULL; ++ ++ switch (abbrev->attrs[i].name) ++ { ++ case DW_AT_name: ++ /* We prefer the linkage name if get one. */ ++ if (val.encoding == ATTR_VAL_STRING) ++ ret = val.u.string; ++ break; ++ ++ case DW_AT_linkage_name: ++ case DW_AT_MIPS_linkage_name: ++ if (val.encoding == ATTR_VAL_STRING) ++ return val.u.string; ++ break; ++ ++ case DW_AT_specification: ++ if (abbrev->attrs[i].form == DW_FORM_ref_addr ++ || abbrev->attrs[i].form == DW_FORM_ref_sig8) ++ { ++ /* This refers to a specification defined in some other ++ compilation unit. We can handle this case if we ++ must, but it's harder. 
*/ ++ break; ++ } ++ if (val.encoding == ATTR_VAL_UINT ++ || val.encoding == ATTR_VAL_REF_UNIT) ++ { ++ const char *name; ++ ++ name = read_referenced_name (ddata, u, val.u.uint, ++ error_callback, data); ++ if (name != NULL) ++ ret = name; ++ } ++ break; ++ ++ default: ++ break; ++ } ++ } ++ ++ return ret; ++} ++ ++/* Add a single range to U that maps to function. Returns 1 on ++ success, 0 on error. */ ++ ++static int ++add_function_range (struct backtrace_state *state, struct dwarf_data *ddata, ++ struct function *function, uint64_t lowpc, uint64_t highpc, ++ backtrace_error_callback error_callback, ++ void *data, struct function_vector *vec) ++{ ++ struct function_addrs *p; ++ ++ /* Add in the base address here, so that we can look up the PC ++ directly. */ ++ lowpc += ddata->base_address; ++ highpc += ddata->base_address; ++ ++ if (vec->count > 0) ++ { ++ p = (struct function_addrs *) vec->vec.base + vec->count - 1; ++ if ((lowpc == p->high || lowpc == p->high + 1) ++ && function == p->function) ++ { ++ if (highpc > p->high) ++ p->high = highpc; ++ return 1; ++ } ++ } ++ ++ p = ((struct function_addrs *) ++ backtrace_vector_grow (state, sizeof (struct function_addrs), ++ error_callback, data, &vec->vec)); ++ if (p == NULL) ++ return 0; ++ ++ p->low = lowpc; ++ p->high = highpc; ++ p->function = function; ++ ++vec->count; ++ return 1; ++} ++ ++/* Add PC ranges to U that map to FUNCTION. Returns 1 on success, 0 ++ on error. 
*/ ++ ++static int ++add_function_ranges (struct backtrace_state *state, struct dwarf_data *ddata, ++ struct unit *u, struct function *function, ++ uint64_t ranges, uint64_t base, ++ backtrace_error_callback error_callback, void *data, ++ struct function_vector *vec) ++{ ++ struct dwarf_buf ranges_buf; ++ ++ if (ranges >= ddata->dwarf_ranges_size) ++ { ++ error_callback (data, "function ranges offset out of range", 0); ++ return 0; ++ } ++ ++ ranges_buf.name = ".debug_ranges"; ++ ranges_buf.start = ddata->dwarf_ranges; ++ ranges_buf.buf = ddata->dwarf_ranges + ranges; ++ ranges_buf.left = ddata->dwarf_ranges_size - ranges; ++ ranges_buf.is_bigendian = ddata->is_bigendian; ++ ranges_buf.error_callback = error_callback; ++ ranges_buf.data = data; ++ ranges_buf.reported_underflow = 0; ++ ++ while (1) ++ { ++ uint64_t low; ++ uint64_t high; ++ ++ if (ranges_buf.reported_underflow) ++ return 0; ++ ++ low = read_address (&ranges_buf, u->addrsize); ++ high = read_address (&ranges_buf, u->addrsize); ++ ++ if (low == 0 && high == 0) ++ break; ++ ++ if (is_highest_address (low, u->addrsize)) ++ base = high; ++ else ++ { ++ if (!add_function_range (state, ddata, function, low + base, ++ high + base, error_callback, data, vec)) ++ return 0; ++ } ++ } ++ ++ if (ranges_buf.reported_underflow) ++ return 0; ++ ++ return 1; ++} ++ ++/* Read one entry plus all its children. Add function addresses to ++ VEC. Returns 1 on success, 0 on error. 
*/ ++ ++static int ++read_function_entry (struct backtrace_state *state, struct dwarf_data *ddata, ++ struct unit *u, uint64_t base, struct dwarf_buf *unit_buf, ++ const struct line_header *lhdr, ++ backtrace_error_callback error_callback, void *data, ++ struct function_vector *vec_function, ++ struct function_vector *vec_inlined) ++{ ++ while (unit_buf->left > 0) ++ { ++ uint64_t code; ++ const struct abbrev *abbrev; ++ int is_function; ++ struct function *function; ++ struct function_vector *vec; ++ size_t i; ++ uint64_t lowpc; ++ int have_lowpc; ++ uint64_t highpc; ++ int have_highpc; ++ int highpc_is_relative; ++ uint64_t ranges; ++ int have_ranges; ++ ++ code = read_uleb128 (unit_buf); ++ if (code == 0) ++ return 1; ++ ++ abbrev = lookup_abbrev (&u->abbrevs, code, error_callback, data); ++ if (abbrev == NULL) ++ return 0; ++ ++ is_function = (abbrev->tag == DW_TAG_subprogram ++ || abbrev->tag == DW_TAG_entry_point ++ || abbrev->tag == DW_TAG_inlined_subroutine); ++ ++ if (abbrev->tag == DW_TAG_inlined_subroutine) ++ vec = vec_inlined; ++ else ++ vec = vec_function; ++ ++ function = NULL; ++ if (is_function) ++ { ++ function = ((struct function *) ++ backtrace_alloc (state, sizeof *function, ++ error_callback, data)); ++ if (function == NULL) ++ return 0; ++ memset (function, 0, sizeof *function); ++ } ++ ++ lowpc = 0; ++ have_lowpc = 0; ++ highpc = 0; ++ have_highpc = 0; ++ highpc_is_relative = 0; ++ ranges = 0; ++ have_ranges = 0; ++ for (i = 0; i < abbrev->num_attrs; ++i) ++ { ++ struct attr_val val; ++ ++ if (!read_attribute (abbrev->attrs[i].form, unit_buf, ++ u->is_dwarf64, u->version, u->addrsize, ++ ddata->dwarf_str, ddata->dwarf_str_size, ++ &val)) ++ return 0; ++ ++ /* The compile unit sets the base address for any address ++ ranges in the function entries. 
*/ ++ if (abbrev->tag == DW_TAG_compile_unit ++ && abbrev->attrs[i].name == DW_AT_low_pc ++ && val.encoding == ATTR_VAL_ADDRESS) ++ base = val.u.uint; ++ ++ if (is_function) ++ { ++ switch (abbrev->attrs[i].name) ++ { ++ case DW_AT_call_file: ++ if (val.encoding == ATTR_VAL_UINT) ++ { ++ if (val.u.uint == 0) ++ function->caller_filename = ""; ++ else ++ { ++ if (val.u.uint - 1 >= lhdr->filenames_count) ++ { ++ dwarf_buf_error (unit_buf, ++ ("invalid file number in " ++ "DW_AT_call_file attribute")); ++ return 0; ++ } ++ function->caller_filename = ++ lhdr->filenames[val.u.uint - 1]; ++ } ++ } ++ break; ++ ++ case DW_AT_call_line: ++ if (val.encoding == ATTR_VAL_UINT) ++ function->caller_lineno = val.u.uint; ++ break; ++ ++ case DW_AT_abstract_origin: ++ case DW_AT_specification: ++ if (abbrev->attrs[i].form == DW_FORM_ref_addr ++ || abbrev->attrs[i].form == DW_FORM_ref_sig8) ++ { ++ /* This refers to an abstract origin defined in ++ some other compilation unit. We can handle ++ this case if we must, but it's harder. */ ++ break; ++ } ++ if (val.encoding == ATTR_VAL_UINT ++ || val.encoding == ATTR_VAL_REF_UNIT) ++ { ++ const char *name; ++ ++ name = read_referenced_name (ddata, u, val.u.uint, ++ error_callback, data); ++ if (name != NULL) ++ function->name = name; ++ } ++ break; ++ ++ case DW_AT_name: ++ if (val.encoding == ATTR_VAL_STRING) ++ { ++ /* Don't override a name we found in some other ++ way, as it will normally be more ++ useful--e.g., this name is normally not ++ mangled. 
*/ ++ if (function->name == NULL) ++ function->name = val.u.string; ++ } ++ break; ++ ++ case DW_AT_linkage_name: ++ case DW_AT_MIPS_linkage_name: ++ if (val.encoding == ATTR_VAL_STRING) ++ function->name = val.u.string; ++ break; ++ ++ case DW_AT_low_pc: ++ if (val.encoding == ATTR_VAL_ADDRESS) ++ { ++ lowpc = val.u.uint; ++ have_lowpc = 1; ++ } ++ break; ++ ++ case DW_AT_high_pc: ++ if (val.encoding == ATTR_VAL_ADDRESS) ++ { ++ highpc = val.u.uint; ++ have_highpc = 1; ++ } ++ else if (val.encoding == ATTR_VAL_UINT) ++ { ++ highpc = val.u.uint; ++ have_highpc = 1; ++ highpc_is_relative = 1; ++ } ++ break; ++ ++ case DW_AT_ranges: ++ if (val.encoding == ATTR_VAL_UINT ++ || val.encoding == ATTR_VAL_REF_SECTION) ++ { ++ ranges = val.u.uint; ++ have_ranges = 1; ++ } ++ break; ++ ++ default: ++ break; ++ } ++ } ++ } ++ ++ /* If we couldn't find a name for the function, we have no use ++ for it. */ ++ if (is_function && function->name == NULL) ++ { ++ backtrace_free (state, function, sizeof *function, ++ error_callback, data); ++ is_function = 0; ++ } ++ ++ if (is_function) ++ { ++ if (have_ranges) ++ { ++ if (!add_function_ranges (state, ddata, u, function, ranges, ++ base, error_callback, data, vec)) ++ return 0; ++ } ++ else if (have_lowpc && have_highpc) ++ { ++ if (highpc_is_relative) ++ highpc += lowpc; ++ if (!add_function_range (state, ddata, function, lowpc, highpc, ++ error_callback, data, vec)) ++ return 0; ++ } ++ else ++ { ++ backtrace_free (state, function, sizeof *function, ++ error_callback, data); ++ is_function = 0; ++ } ++ } ++ ++ if (abbrev->has_children) ++ { ++ if (!is_function) ++ { ++ if (!read_function_entry (state, ddata, u, base, unit_buf, lhdr, ++ error_callback, data, vec_function, ++ vec_inlined)) ++ return 0; ++ } ++ else ++ { ++ struct function_vector fvec; ++ ++ /* Gather any information for inlined functions in ++ FVEC. 
*/ ++ ++ memset (&fvec, 0, sizeof fvec); ++ ++ if (!read_function_entry (state, ddata, u, base, unit_buf, lhdr, ++ error_callback, data, vec_function, ++ &fvec)) ++ return 0; ++ ++ if (fvec.count > 0) ++ { ++ struct function_addrs *faddrs; ++ ++ if (!backtrace_vector_release (state, &fvec.vec, ++ error_callback, data)) ++ return 0; ++ ++ faddrs = (struct function_addrs *) fvec.vec.base; ++ backtrace_qsort (faddrs, fvec.count, ++ sizeof (struct function_addrs), ++ function_addrs_compare); ++ ++ function->function_addrs = faddrs; ++ function->function_addrs_count = fvec.count; ++ } ++ } ++ } ++ } ++ ++ return 1; ++} ++ ++/* Read function name information for a compilation unit. We look ++ through the whole unit looking for function tags. */ ++ ++static void ++read_function_info (struct backtrace_state *state, struct dwarf_data *ddata, ++ const struct line_header *lhdr, ++ backtrace_error_callback error_callback, void *data, ++ struct unit *u, struct function_vector *fvec, ++ struct function_addrs **ret_addrs, ++ size_t *ret_addrs_count) ++{ ++ struct function_vector lvec; ++ struct function_vector *pfvec; ++ struct dwarf_buf unit_buf; ++ struct function_addrs *addrs; ++ size_t addrs_count; ++ ++ /* Use FVEC if it is not NULL. Otherwise use our own vector. 
*/ ++ if (fvec != NULL) ++ pfvec = fvec; ++ else ++ { ++ memset (&lvec, 0, sizeof lvec); ++ pfvec = &lvec; ++ } ++ ++ unit_buf.name = ".debug_info"; ++ unit_buf.start = ddata->dwarf_info; ++ unit_buf.buf = u->unit_data; ++ unit_buf.left = u->unit_data_len; ++ unit_buf.is_bigendian = ddata->is_bigendian; ++ unit_buf.error_callback = error_callback; ++ unit_buf.data = data; ++ unit_buf.reported_underflow = 0; ++ ++ while (unit_buf.left > 0) ++ { ++ if (!read_function_entry (state, ddata, u, 0, &unit_buf, lhdr, ++ error_callback, data, pfvec, pfvec)) ++ return; ++ } ++ ++ if (pfvec->count == 0) ++ return; ++ ++ addrs_count = pfvec->count; ++ ++ if (fvec == NULL) ++ { ++ if (!backtrace_vector_release (state, &lvec.vec, error_callback, data)) ++ return; ++ addrs = (struct function_addrs *) pfvec->vec.base; ++ } ++ else ++ { ++ /* Finish this list of addresses, but leave the remaining space in ++ the vector available for the next function unit. */ ++ addrs = ((struct function_addrs *) ++ backtrace_vector_finish (state, &fvec->vec, ++ error_callback, data)); ++ if (addrs == NULL) ++ return; ++ fvec->count = 0; ++ } ++ ++ backtrace_qsort (addrs, addrs_count, sizeof (struct function_addrs), ++ function_addrs_compare); ++ ++ *ret_addrs = addrs; ++ *ret_addrs_count = addrs_count; ++} ++ ++/* See if PC is inlined in FUNCTION. If it is, print out the inlined ++ information, and update FILENAME and LINENO for the caller. ++ Returns whatever CALLBACK returns, or 0 to keep going. 
*/ ++ ++static int ++report_inlined_functions (uintptr_t pc, struct function *function, ++ backtrace_full_callback callback, void *data, ++ const char **filename, int *lineno) ++{ ++ struct function_addrs *function_addrs; ++ struct function *inlined; ++ int ret; ++ ++ if (function->function_addrs_count == 0) ++ return 0; ++ ++ function_addrs = ((struct function_addrs *) ++ bsearch (&pc, function->function_addrs, ++ function->function_addrs_count, ++ sizeof (struct function_addrs), ++ function_addrs_search)); ++ if (function_addrs == NULL) ++ return 0; ++ ++ while (((size_t) (function_addrs - function->function_addrs) + 1 ++ < function->function_addrs_count) ++ && pc >= (function_addrs + 1)->low ++ && pc < (function_addrs + 1)->high) ++ ++function_addrs; ++ ++ /* We found an inlined call. */ ++ ++ inlined = function_addrs->function; ++ ++ /* Report any calls inlined into this one. */ ++ ret = report_inlined_functions (pc, inlined, callback, data, ++ filename, lineno); ++ if (ret != 0) ++ return ret; ++ ++ /* Report this inlined call. */ ++ ret = callback (data, pc, *filename, *lineno, inlined->name); ++ if (ret != 0) ++ return ret; ++ ++ /* Our caller will report the caller of the inlined function; tell ++ it the appropriate filename and line number. */ ++ *filename = inlined->caller_filename; ++ *lineno = inlined->caller_lineno; ++ ++ return 0; ++} ++ ++/* Look for a PC in the DWARF mapping for one module. On success, ++ call CALLBACK and return whatever it returns. On error, call ++ ERROR_CALLBACK and return 0. Sets *FOUND to 1 if the PC is found, ++ 0 if not. 
*/ ++ ++static int ++dwarf_lookup_pc (struct backtrace_state *state, struct dwarf_data *ddata, ++ uintptr_t pc, backtrace_full_callback callback, ++ backtrace_error_callback error_callback, void *data, ++ int *found) ++{ ++ struct unit_addrs *entry; ++ struct unit *u; ++ int new_data; ++ struct line *lines; ++ struct line *ln; ++ struct function_addrs *function_addrs; ++ struct function *function; ++ const char *filename; ++ int lineno; ++ int ret; ++ ++ *found = 1; ++ ++ /* Find an address range that includes PC. */ ++ entry = bsearch (&pc, ddata->addrs, ddata->addrs_count, ++ sizeof (struct unit_addrs), unit_addrs_search); ++ ++ if (entry == NULL) ++ { ++ *found = 0; ++ return 0; ++ } ++ ++ /* If there are multiple ranges that contain PC, use the last one, ++ in order to produce predictable results. If we assume that all ++ ranges are properly nested, then the last range will be the ++ smallest one. */ ++ while ((size_t) (entry - ddata->addrs) + 1 < ddata->addrs_count ++ && pc >= (entry + 1)->low ++ && pc < (entry + 1)->high) ++ ++entry; ++ ++ /* We need the lines, lines_count, function_addrs, ++ function_addrs_count fields of u. If they are not set, we need ++ to set them. When running in threaded mode, we need to allow for ++ the possibility that some other thread is setting them ++ simultaneously. */ ++ ++ u = entry->u; ++ lines = u->lines; ++ ++ /* Skip units with no useful line number information by walking ++ backward. Useless line number information is marked by setting ++ lines == -1. 
*/ ++ while (entry > ddata->addrs ++ && pc >= (entry - 1)->low ++ && pc < (entry - 1)->high) ++ { ++ if (state->threaded) ++ lines = (struct line *) backtrace_atomic_load_pointer (&u->lines); ++ ++ if (lines != (struct line *) (uintptr_t) -1) ++ break; ++ ++ --entry; ++ ++ u = entry->u; ++ lines = u->lines; ++ } ++ ++ if (state->threaded) ++ lines = backtrace_atomic_load_pointer (&u->lines); ++ ++ new_data = 0; ++ if (lines == NULL) ++ { ++ size_t function_addrs_count; ++ struct line_header lhdr; ++ size_t count; ++ ++ /* We have never read the line information for this unit. Read ++ it now. */ ++ ++ function_addrs = NULL; ++ function_addrs_count = 0; ++ if (read_line_info (state, ddata, error_callback, data, entry->u, &lhdr, ++ &lines, &count)) ++ { ++ struct function_vector *pfvec; ++ ++ /* If not threaded, reuse DDATA->FVEC for better memory ++ consumption. */ ++ if (state->threaded) ++ pfvec = NULL; ++ else ++ pfvec = &ddata->fvec; ++ read_function_info (state, ddata, &lhdr, error_callback, data, ++ entry->u, pfvec, &function_addrs, ++ &function_addrs_count); ++ free_line_header (state, &lhdr, error_callback, data); ++ new_data = 1; ++ } ++ ++ /* Atomically store the information we just read into the unit. ++ If another thread is simultaneously writing, it presumably ++ read the same information, and we don't care which one we ++ wind up with; we just leak the other one. We do have to ++ write the lines field last, so that the acquire-loads above ++ ensure that the other fields are set. 
*/ ++ ++ if (!state->threaded) ++ { ++ u->lines_count = count; ++ u->function_addrs = function_addrs; ++ u->function_addrs_count = function_addrs_count; ++ u->lines = lines; ++ } ++ else ++ { ++ backtrace_atomic_store_size_t (&u->lines_count, count); ++ backtrace_atomic_store_pointer (&u->function_addrs, function_addrs); ++ backtrace_atomic_store_size_t (&u->function_addrs_count, ++ function_addrs_count); ++ backtrace_atomic_store_pointer (&u->lines, lines); ++ } ++ } ++ ++ /* Now all fields of U have been initialized. */ ++ ++ if (lines == (struct line *) (uintptr_t) -1) ++ { ++ /* If reading the line number information failed in some way, ++ try again to see if there is a better compilation unit for ++ this PC. */ ++ if (new_data) ++ return dwarf_lookup_pc (state, ddata, pc, callback, error_callback, ++ data, found); ++ return callback (data, pc, NULL, 0, NULL); ++ } ++ ++ /* Search for PC within this unit. */ ++ ++ ln = (struct line *) bsearch (&pc, lines, entry->u->lines_count, ++ sizeof (struct line), line_search); ++ if (ln == NULL) ++ { ++ /* The PC is between the low_pc and high_pc attributes of the ++ compilation unit, but no entry in the line table covers it. ++ This implies that the start of the compilation unit has no ++ line number information. */ ++ ++ if (entry->u->abs_filename == NULL) ++ { ++ const char *filename; ++ ++ filename = entry->u->filename; ++ if (filename != NULL ++ && !IS_ABSOLUTE_PATH (filename) ++ && entry->u->comp_dir != NULL) ++ { ++ size_t filename_len; ++ const char *dir; ++ size_t dir_len; ++ char *s; ++ ++ filename_len = strlen (filename); ++ dir = entry->u->comp_dir; ++ dir_len = strlen (dir); ++ s = (char *) backtrace_alloc (state, dir_len + filename_len + 2, ++ error_callback, data); ++ if (s == NULL) ++ { ++ *found = 0; ++ return 0; ++ } ++ memcpy (s, dir, dir_len); ++ /* FIXME: Should use backslash if DOS file system. 
*/ ++ s[dir_len] = '/'; ++ memcpy (s + dir_len + 1, filename, filename_len + 1); ++ filename = s; ++ } ++ entry->u->abs_filename = filename; ++ } ++ ++ return callback (data, pc, entry->u->abs_filename, 0, NULL); ++ } ++ ++ /* Search for function name within this unit. */ ++ ++ if (entry->u->function_addrs_count == 0) ++ return callback (data, pc, ln->filename, ln->lineno, NULL); ++ ++ function_addrs = ((struct function_addrs *) ++ bsearch (&pc, entry->u->function_addrs, ++ entry->u->function_addrs_count, ++ sizeof (struct function_addrs), ++ function_addrs_search)); ++ if (function_addrs == NULL) ++ return callback (data, pc, ln->filename, ln->lineno, NULL); ++ ++ /* If there are multiple function ranges that contain PC, use the ++ last one, in order to produce predictable results. */ ++ ++ while (((size_t) (function_addrs - entry->u->function_addrs + 1) ++ < entry->u->function_addrs_count) ++ && pc >= (function_addrs + 1)->low ++ && pc < (function_addrs + 1)->high) ++ ++function_addrs; ++ ++ function = function_addrs->function; ++ ++ filename = ln->filename; ++ lineno = ln->lineno; ++ ++ ret = report_inlined_functions (pc, function, callback, data, ++ &filename, &lineno); ++ if (ret != 0) ++ return ret; ++ ++ return callback (data, pc, filename, lineno, function->name); ++} ++ ++ ++/* Return the file/line information for a PC using the DWARF mapping ++ we built earlier. 
*/ ++ ++static int ++dwarf_fileline (struct backtrace_state *state, uintptr_t pc, ++ backtrace_full_callback callback, ++ backtrace_error_callback error_callback, void *data) ++{ ++ struct dwarf_data *ddata; ++ int found; ++ int ret; ++ ++ if (!state->threaded) ++ { ++ for (ddata = (struct dwarf_data *) state->fileline_data; ++ ddata != NULL; ++ ddata = ddata->next) ++ { ++ ret = dwarf_lookup_pc (state, ddata, pc, callback, error_callback, ++ data, &found); ++ if (ret != 0 || found) ++ return ret; ++ } ++ } ++ else ++ { ++ struct dwarf_data **pp; ++ ++ pp = (struct dwarf_data **) (void *) &state->fileline_data; ++ while (1) ++ { ++ ddata = backtrace_atomic_load_pointer (pp); ++ if (ddata == NULL) ++ break; ++ ++ ret = dwarf_lookup_pc (state, ddata, pc, callback, error_callback, ++ data, &found); ++ if (ret != 0 || found) ++ return ret; ++ ++ pp = &ddata->next; ++ } ++ } ++ ++ /* FIXME: See if any libraries have been dlopen'ed. */ ++ ++ return callback (data, pc, NULL, 0, NULL); ++} ++ ++/* Initialize our data structures from the DWARF debug info for a ++ file. Return NULL on failure. 
*/ ++ ++static struct dwarf_data * ++build_dwarf_data (struct backtrace_state *state, ++ uintptr_t base_address, ++ const unsigned char *dwarf_info, ++ size_t dwarf_info_size, ++ const unsigned char *dwarf_line, ++ size_t dwarf_line_size, ++ const unsigned char *dwarf_abbrev, ++ size_t dwarf_abbrev_size, ++ const unsigned char *dwarf_ranges, ++ size_t dwarf_ranges_size, ++ const unsigned char *dwarf_str, ++ size_t dwarf_str_size, ++ int is_bigendian, ++ backtrace_error_callback error_callback, ++ void *data) ++{ ++ struct unit_addrs_vector addrs_vec; ++ struct unit_addrs *addrs; ++ size_t addrs_count; ++ struct dwarf_data *fdata; ++ ++ if (!build_address_map (state, base_address, dwarf_info, dwarf_info_size, ++ dwarf_abbrev, dwarf_abbrev_size, dwarf_ranges, ++ dwarf_ranges_size, dwarf_str, dwarf_str_size, ++ is_bigendian, error_callback, data, &addrs_vec)) ++ return NULL; ++ ++ if (!backtrace_vector_release (state, &addrs_vec.vec, error_callback, data)) ++ return NULL; ++ addrs = (struct unit_addrs *) addrs_vec.vec.base; ++ addrs_count = addrs_vec.count; ++ backtrace_qsort (addrs, addrs_count, sizeof (struct unit_addrs), ++ unit_addrs_compare); ++ ++ fdata = ((struct dwarf_data *) ++ backtrace_alloc (state, sizeof (struct dwarf_data), ++ error_callback, data)); ++ if (fdata == NULL) ++ return NULL; ++ ++ fdata->next = NULL; ++ fdata->base_address = base_address; ++ fdata->addrs = addrs; ++ fdata->addrs_count = addrs_count; ++ fdata->dwarf_info = dwarf_info; ++ fdata->dwarf_info_size = dwarf_info_size; ++ fdata->dwarf_line = dwarf_line; ++ fdata->dwarf_line_size = dwarf_line_size; ++ fdata->dwarf_ranges = dwarf_ranges; ++ fdata->dwarf_ranges_size = dwarf_ranges_size; ++ fdata->dwarf_str = dwarf_str; ++ fdata->dwarf_str_size = dwarf_str_size; ++ fdata->is_bigendian = is_bigendian; ++ memset (&fdata->fvec, 0, sizeof fdata->fvec); ++ ++ return fdata; ++} ++ ++/* Build our data structures from the DWARF sections for a module. ++ Set FILELINE_FN and STATE->FILELINE_DATA. 
Return 1 on success, 0 ++ on failure. */ ++ ++int ++backtrace_dwarf_add (struct backtrace_state *state, ++ uintptr_t base_address, ++ const unsigned char *dwarf_info, ++ size_t dwarf_info_size, ++ const unsigned char *dwarf_line, ++ size_t dwarf_line_size, ++ const unsigned char *dwarf_abbrev, ++ size_t dwarf_abbrev_size, ++ const unsigned char *dwarf_ranges, ++ size_t dwarf_ranges_size, ++ const unsigned char *dwarf_str, ++ size_t dwarf_str_size, ++ int is_bigendian, ++ backtrace_error_callback error_callback, ++ void *data, fileline *fileline_fn) ++{ ++ struct dwarf_data *fdata; ++ ++ fdata = build_dwarf_data (state, base_address, dwarf_info, dwarf_info_size, ++ dwarf_line, dwarf_line_size, dwarf_abbrev, ++ dwarf_abbrev_size, dwarf_ranges, dwarf_ranges_size, ++ dwarf_str, dwarf_str_size, is_bigendian, ++ error_callback, data); ++ if (fdata == NULL) ++ return 0; ++ ++ if (!state->threaded) ++ { ++ struct dwarf_data **pp; ++ ++ for (pp = (struct dwarf_data **) (void *) &state->fileline_data; ++ *pp != NULL; ++ pp = &(*pp)->next) ++ ; ++ *pp = fdata; ++ } ++ else ++ { ++ while (1) ++ { ++ struct dwarf_data **pp; ++ ++ pp = (struct dwarf_data **) (void *) &state->fileline_data; ++ ++ while (1) ++ { ++ struct dwarf_data *p; ++ ++ p = backtrace_atomic_load_pointer (pp); ++ ++ if (p == NULL) ++ break; ++ ++ pp = &p->next; ++ } ++ ++ if (__sync_bool_compare_and_swap (pp, NULL, fdata)) ++ break; ++ } ++ } ++ ++ *fileline_fn = dwarf_fileline; ++ ++ return 1; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/dwarf2.def index 000000000,000000000..bb916ca23 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/dwarf2.def @@@ -1,0 -1,0 +1,773 @@@ ++/* -*- c -*- ++ Declarations and definitions of codes relating to the DWARF2 and ++ DWARF3 symbolic debugging information formats. ++ Copyright (C) 1992-2015 Free Software Foundation, Inc. 
++ ++ Written by Gary Funck (gary@intrepid.com) The Ada Joint Program ++ Office (AJPO), Florida State University and Silicon Graphics Inc. ++ provided support for this effort -- June 21, 1995. ++ ++ Derived from the DWARF 1 implementation written by Ron Guilmette ++ (rfg@netcom.com), November 1990. ++ ++ This file is part of GCC. ++ ++ GCC is free software; you can redistribute it and/or modify it under ++ the terms of the GNU General Public License as published by the Free ++ Software Foundation; either version 3, or (at your option) any later ++ version. ++ ++ GCC is distributed in the hope that it will be useful, but WITHOUT ++ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY ++ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public ++ License for more details. ++ ++ Under Section 7 of GPL version 3, you are granted additional ++ permissions described in the GCC Runtime Library Exception, version ++ 3.1, as published by the Free Software Foundation. ++ ++ You should have received a copy of the GNU General Public License and ++ a copy of the GCC Runtime Library Exception along with this program; ++ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see ++ . */ ++ ++/* This file is derived from the DWARF specification (a public document) ++ Revision 2.0.0 (July 27, 1993) developed by the UNIX International ++ Programming Languages Special Interest Group (UI/PLSIG) and distributed ++ by UNIX International. Copies of this specification are available from ++ UNIX International, 20 Waterview Boulevard, Parsippany, NJ, 07054. ++ ++ This file also now contains definitions from the DWARF 3 specification ++ published Dec 20, 2005, available from: http://dwarf.freestandards.org. ++ ++ This file also now contains definitions from the DWARF 4 ++ specification, available from: http://dwarfstd.org/ */ ++ ++/* This file declares various DWARF-related constants using a set of ++ macros which can be redefined by the including file. 
++ ++ The macros are in sections. Each section corresponds to a single ++ set of DWARF constants and has a corresponding key. The key is ++ used in all the macro names. ++ ++ The sections are TAG (for DW_TAG_ constants), FORM (DW_FORM_), AT ++ (DW_AT_), OP (DW_OP_), ATE (DW_ATE_), and CFA (DW_CFA_). ++ ++ Using TAG as an example, the following macros may be used for each ++ key: ++ ++ DW_FIRST_TAG(name, value) - Introduce the first DW_TAG constant. ++ ++ DW_TAG(name, value) - Define a subsequent constant. ++ ++ DW_TAG_DUP(name, value) - Define a subsequent constant whose value ++ is a duplicate of some other constant. Not all keys use the _DUP ++ macro form. If more than one name shares a value, then the base ++ (DW_TAG) form will be the preferred name and DW_TAG_DUP will hold ++ any alternate names. ++ ++ DW_END_TAG - Invoked at the end of the DW_TAG constants. */ ++ ++DW_FIRST_TAG (DW_TAG_padding, 0x00) ++DW_TAG (DW_TAG_array_type, 0x01) ++DW_TAG (DW_TAG_class_type, 0x02) ++DW_TAG (DW_TAG_entry_point, 0x03) ++DW_TAG (DW_TAG_enumeration_type, 0x04) ++DW_TAG (DW_TAG_formal_parameter, 0x05) ++DW_TAG (DW_TAG_imported_declaration, 0x08) ++DW_TAG (DW_TAG_label, 0x0a) ++DW_TAG (DW_TAG_lexical_block, 0x0b) ++DW_TAG (DW_TAG_member, 0x0d) ++DW_TAG (DW_TAG_pointer_type, 0x0f) ++DW_TAG (DW_TAG_reference_type, 0x10) ++DW_TAG (DW_TAG_compile_unit, 0x11) ++DW_TAG (DW_TAG_string_type, 0x12) ++DW_TAG (DW_TAG_structure_type, 0x13) ++DW_TAG (DW_TAG_subroutine_type, 0x15) ++DW_TAG (DW_TAG_typedef, 0x16) ++DW_TAG (DW_TAG_union_type, 0x17) ++DW_TAG (DW_TAG_unspecified_parameters, 0x18) ++DW_TAG (DW_TAG_variant, 0x19) ++DW_TAG (DW_TAG_common_block, 0x1a) ++DW_TAG (DW_TAG_common_inclusion, 0x1b) ++DW_TAG (DW_TAG_inheritance, 0x1c) ++DW_TAG (DW_TAG_inlined_subroutine, 0x1d) ++DW_TAG (DW_TAG_module, 0x1e) ++DW_TAG (DW_TAG_ptr_to_member_type, 0x1f) ++DW_TAG (DW_TAG_set_type, 0x20) ++DW_TAG (DW_TAG_subrange_type, 0x21) ++DW_TAG (DW_TAG_with_stmt, 0x22) ++DW_TAG (DW_TAG_access_declaration, 
0x23) ++DW_TAG (DW_TAG_base_type, 0x24) ++DW_TAG (DW_TAG_catch_block, 0x25) ++DW_TAG (DW_TAG_const_type, 0x26) ++DW_TAG (DW_TAG_constant, 0x27) ++DW_TAG (DW_TAG_enumerator, 0x28) ++DW_TAG (DW_TAG_file_type, 0x29) ++DW_TAG (DW_TAG_friend, 0x2a) ++DW_TAG (DW_TAG_namelist, 0x2b) ++DW_TAG (DW_TAG_namelist_item, 0x2c) ++DW_TAG (DW_TAG_packed_type, 0x2d) ++DW_TAG (DW_TAG_subprogram, 0x2e) ++DW_TAG (DW_TAG_template_type_param, 0x2f) ++DW_TAG (DW_TAG_template_value_param, 0x30) ++DW_TAG (DW_TAG_thrown_type, 0x31) ++DW_TAG (DW_TAG_try_block, 0x32) ++DW_TAG (DW_TAG_variant_part, 0x33) ++DW_TAG (DW_TAG_variable, 0x34) ++DW_TAG (DW_TAG_volatile_type, 0x35) ++/* DWARF 3. */ ++DW_TAG (DW_TAG_dwarf_procedure, 0x36) ++DW_TAG (DW_TAG_restrict_type, 0x37) ++DW_TAG (DW_TAG_interface_type, 0x38) ++DW_TAG (DW_TAG_namespace, 0x39) ++DW_TAG (DW_TAG_imported_module, 0x3a) ++DW_TAG (DW_TAG_unspecified_type, 0x3b) ++DW_TAG (DW_TAG_partial_unit, 0x3c) ++DW_TAG (DW_TAG_imported_unit, 0x3d) ++DW_TAG (DW_TAG_condition, 0x3f) ++DW_TAG (DW_TAG_shared_type, 0x40) ++/* DWARF 4. */ ++DW_TAG (DW_TAG_type_unit, 0x41) ++DW_TAG (DW_TAG_rvalue_reference_type, 0x42) ++DW_TAG (DW_TAG_template_alias, 0x43) ++/* DWARF 5. */ ++DW_TAG (DW_TAG_coarray_type, 0x44) ++DW_TAG (DW_TAG_generic_subrange, 0x45) ++DW_TAG (DW_TAG_dynamic_type, 0x46) ++DW_TAG (DW_TAG_atomic_type, 0x47) ++DW_TAG (DW_TAG_call_site, 0x48) ++DW_TAG (DW_TAG_call_site_parameter, 0x49) ++DW_TAG (DW_TAG_skeleton_unit, 0x4a) ++DW_TAG (DW_TAG_immutable_type, 0x4b) ++ ++DW_TAG_DUP (DW_TAG_lo_user, 0x4080) ++DW_TAG_DUP (DW_TAG_hi_user, 0xffff) ++ ++/* SGI/MIPS Extensions. */ ++DW_TAG (DW_TAG_MIPS_loop, 0x4081) ++ ++/* HP extensions. See: ftp://ftp.hp.com/pub/lang/tools/WDB/wdb-4.0.tar.gz . */ ++DW_TAG (DW_TAG_HP_array_descriptor, 0x4090) ++DW_TAG (DW_TAG_HP_Bliss_field, 0x4091) ++DW_TAG (DW_TAG_HP_Bliss_field_set, 0x4092) ++ ++/* GNU extensions. */ ++DW_TAG (DW_TAG_format_label, 0x4101) /* For FORTRAN 77 and Fortran 90. 
*/ ++DW_TAG (DW_TAG_function_template, 0x4102) /* For C++. */ ++DW_TAG (DW_TAG_class_template, 0x4103) /* For C++. */ ++DW_TAG (DW_TAG_GNU_BINCL, 0x4104) ++DW_TAG (DW_TAG_GNU_EINCL, 0x4105) ++/* Template template parameter. ++ See http://gcc.gnu.org/wiki/TemplateParmsDwarf . */ ++DW_TAG (DW_TAG_GNU_template_template_param, 0x4106) ++ ++/* Template parameter pack extension, specified at ++ http://wiki.dwarfstd.org/index.php?title=C%2B%2B0x:_Variadic_templates ++ The values of these two TAGS are in the DW_TAG_GNU_* space until the tags ++ are properly part of DWARF 5. */ ++DW_TAG (DW_TAG_GNU_template_parameter_pack, 0x4107) ++DW_TAG (DW_TAG_GNU_formal_parameter_pack, 0x4108) ++/* The GNU call site extension, specified at ++ http://www.dwarfstd.org/ShowIssue.php?issue=100909.2&type=open . ++ The values of these two TAGS are in the DW_TAG_GNU_* space until the tags ++ are properly part of DWARF 5. */ ++DW_TAG (DW_TAG_GNU_call_site, 0x4109) ++DW_TAG (DW_TAG_GNU_call_site_parameter, 0x410a) ++/* Extensions for UPC. See: http://dwarfstd.org/doc/DWARF4.pdf. */ ++DW_TAG (DW_TAG_upc_shared_type, 0x8765) ++DW_TAG (DW_TAG_upc_strict_type, 0x8766) ++DW_TAG (DW_TAG_upc_relaxed_type, 0x8767) ++/* PGI (STMicroelectronics) extensions. No documentation available. 
*/ ++DW_TAG (DW_TAG_PGI_kanji_type, 0xA000) ++DW_TAG (DW_TAG_PGI_interface_block, 0xA020) ++DW_END_TAG ++ ++DW_FIRST_FORM (DW_FORM_addr, 0x01) ++DW_FORM (DW_FORM_block2, 0x03) ++DW_FORM (DW_FORM_block4, 0x04) ++DW_FORM (DW_FORM_data2, 0x05) ++DW_FORM (DW_FORM_data4, 0x06) ++DW_FORM (DW_FORM_data8, 0x07) ++DW_FORM (DW_FORM_string, 0x08) ++DW_FORM (DW_FORM_block, 0x09) ++DW_FORM (DW_FORM_block1, 0x0a) ++DW_FORM (DW_FORM_data1, 0x0b) ++DW_FORM (DW_FORM_flag, 0x0c) ++DW_FORM (DW_FORM_sdata, 0x0d) ++DW_FORM (DW_FORM_strp, 0x0e) ++DW_FORM (DW_FORM_udata, 0x0f) ++DW_FORM (DW_FORM_ref_addr, 0x10) ++DW_FORM (DW_FORM_ref1, 0x11) ++DW_FORM (DW_FORM_ref2, 0x12) ++DW_FORM (DW_FORM_ref4, 0x13) ++DW_FORM (DW_FORM_ref8, 0x14) ++DW_FORM (DW_FORM_ref_udata, 0x15) ++DW_FORM (DW_FORM_indirect, 0x16) ++/* DWARF 4. */ ++DW_FORM (DW_FORM_sec_offset, 0x17) ++DW_FORM (DW_FORM_exprloc, 0x18) ++DW_FORM (DW_FORM_flag_present, 0x19) ++DW_FORM (DW_FORM_ref_sig8, 0x20) ++/* DWARF 5. */ ++DW_FORM (DW_FORM_strx, 0x1a) ++DW_FORM (DW_FORM_addrx, 0x1b) ++DW_FORM (DW_FORM_ref_sup, 0x1c) ++DW_FORM (DW_FORM_strp_sup, 0x1d) ++DW_FORM (DW_FORM_data16, 0x1e) ++DW_FORM (DW_FORM_line_strp, 0x1f) ++DW_FORM (DW_FORM_implicit_const, 0x21) ++DW_FORM (DW_FORM_loclistx, 0x22) ++DW_FORM (DW_FORM_rnglistx, 0x23) ++/* Extensions for Fission. See http://gcc.gnu.org/wiki/DebugFission. */ ++DW_FORM (DW_FORM_GNU_addr_index, 0x1f01) ++DW_FORM (DW_FORM_GNU_str_index, 0x1f02) ++/* Extensions for DWZ multifile. ++ See http://www.dwarfstd.org/ShowIssue.php?issue=120604.1&type=open . 
*/ ++DW_FORM (DW_FORM_GNU_ref_alt, 0x1f20) ++DW_FORM (DW_FORM_GNU_strp_alt, 0x1f21) ++DW_END_FORM ++ ++DW_FIRST_AT (DW_AT_sibling, 0x01) ++DW_AT (DW_AT_location, 0x02) ++DW_AT (DW_AT_name, 0x03) ++DW_AT (DW_AT_ordering, 0x09) ++DW_AT (DW_AT_subscr_data, 0x0a) ++DW_AT (DW_AT_byte_size, 0x0b) ++DW_AT (DW_AT_bit_offset, 0x0c) ++DW_AT (DW_AT_bit_size, 0x0d) ++DW_AT (DW_AT_element_list, 0x0f) ++DW_AT (DW_AT_stmt_list, 0x10) ++DW_AT (DW_AT_low_pc, 0x11) ++DW_AT (DW_AT_high_pc, 0x12) ++DW_AT (DW_AT_language, 0x13) ++DW_AT (DW_AT_member, 0x14) ++DW_AT (DW_AT_discr, 0x15) ++DW_AT (DW_AT_discr_value, 0x16) ++DW_AT (DW_AT_visibility, 0x17) ++DW_AT (DW_AT_import, 0x18) ++DW_AT (DW_AT_string_length, 0x19) ++DW_AT (DW_AT_common_reference, 0x1a) ++DW_AT (DW_AT_comp_dir, 0x1b) ++DW_AT (DW_AT_const_value, 0x1c) ++DW_AT (DW_AT_containing_type, 0x1d) ++DW_AT (DW_AT_default_value, 0x1e) ++DW_AT (DW_AT_inline, 0x20) ++DW_AT (DW_AT_is_optional, 0x21) ++DW_AT (DW_AT_lower_bound, 0x22) ++DW_AT (DW_AT_producer, 0x25) ++DW_AT (DW_AT_prototyped, 0x27) ++DW_AT (DW_AT_return_addr, 0x2a) ++DW_AT (DW_AT_start_scope, 0x2c) ++DW_AT (DW_AT_bit_stride, 0x2e) ++DW_AT (DW_AT_upper_bound, 0x2f) ++DW_AT (DW_AT_abstract_origin, 0x31) ++DW_AT (DW_AT_accessibility, 0x32) ++DW_AT (DW_AT_address_class, 0x33) ++DW_AT (DW_AT_artificial, 0x34) ++DW_AT (DW_AT_base_types, 0x35) ++DW_AT (DW_AT_calling_convention, 0x36) ++DW_AT (DW_AT_count, 0x37) ++DW_AT (DW_AT_data_member_location, 0x38) ++DW_AT (DW_AT_decl_column, 0x39) ++DW_AT (DW_AT_decl_file, 0x3a) ++DW_AT (DW_AT_decl_line, 0x3b) ++DW_AT (DW_AT_declaration, 0x3c) ++DW_AT (DW_AT_discr_list, 0x3d) ++DW_AT (DW_AT_encoding, 0x3e) ++DW_AT (DW_AT_external, 0x3f) ++DW_AT (DW_AT_frame_base, 0x40) ++DW_AT (DW_AT_friend, 0x41) ++DW_AT (DW_AT_identifier_case, 0x42) ++DW_AT (DW_AT_macro_info, 0x43) ++DW_AT (DW_AT_namelist_items, 0x44) ++DW_AT (DW_AT_priority, 0x45) ++DW_AT (DW_AT_segment, 0x46) ++DW_AT (DW_AT_specification, 0x47) ++DW_AT (DW_AT_static_link, 0x48) ++DW_AT 
(DW_AT_type, 0x49) ++DW_AT (DW_AT_use_location, 0x4a) ++DW_AT (DW_AT_variable_parameter, 0x4b) ++DW_AT (DW_AT_virtuality, 0x4c) ++DW_AT (DW_AT_vtable_elem_location, 0x4d) ++/* DWARF 3 values. */ ++DW_AT (DW_AT_allocated, 0x4e) ++DW_AT (DW_AT_associated, 0x4f) ++DW_AT (DW_AT_data_location, 0x50) ++DW_AT (DW_AT_byte_stride, 0x51) ++DW_AT (DW_AT_entry_pc, 0x52) ++DW_AT (DW_AT_use_UTF8, 0x53) ++DW_AT (DW_AT_extension, 0x54) ++DW_AT (DW_AT_ranges, 0x55) ++DW_AT (DW_AT_trampoline, 0x56) ++DW_AT (DW_AT_call_column, 0x57) ++DW_AT (DW_AT_call_file, 0x58) ++DW_AT (DW_AT_call_line, 0x59) ++DW_AT (DW_AT_description, 0x5a) ++DW_AT (DW_AT_binary_scale, 0x5b) ++DW_AT (DW_AT_decimal_scale, 0x5c) ++DW_AT (DW_AT_small, 0x5d) ++DW_AT (DW_AT_decimal_sign, 0x5e) ++DW_AT (DW_AT_digit_count, 0x5f) ++DW_AT (DW_AT_picture_string, 0x60) ++DW_AT (DW_AT_mutable, 0x61) ++DW_AT (DW_AT_threads_scaled, 0x62) ++DW_AT (DW_AT_explicit, 0x63) ++DW_AT (DW_AT_object_pointer, 0x64) ++DW_AT (DW_AT_endianity, 0x65) ++DW_AT (DW_AT_elemental, 0x66) ++DW_AT (DW_AT_pure, 0x67) ++DW_AT (DW_AT_recursive, 0x68) ++/* DWARF 4. */ ++DW_AT (DW_AT_signature, 0x69) ++DW_AT (DW_AT_main_subprogram, 0x6a) ++DW_AT (DW_AT_data_bit_offset, 0x6b) ++DW_AT (DW_AT_const_expr, 0x6c) ++DW_AT (DW_AT_enum_class, 0x6d) ++DW_AT (DW_AT_linkage_name, 0x6e) ++/* DWARF 5. 
*/ ++DW_AT (DW_AT_string_length_bit_size, 0x6f) ++DW_AT (DW_AT_string_length_byte_size, 0x70) ++DW_AT (DW_AT_rank, 0x71) ++DW_AT (DW_AT_str_offsets_base, 0x72) ++DW_AT (DW_AT_addr_base, 0x73) ++DW_AT (DW_AT_rnglists_base, 0x74) ++DW_AT (DW_AT_dwo_name, 0x76) ++DW_AT (DW_AT_reference, 0x77) ++DW_AT (DW_AT_rvalue_reference, 0x78) ++DW_AT (DW_AT_macros, 0x79) ++DW_AT (DW_AT_call_all_calls, 0x7a) ++DW_AT (DW_AT_call_all_source_calls, 0x7b) ++DW_AT (DW_AT_call_all_tail_calls, 0x7c) ++DW_AT (DW_AT_call_return_pc, 0x7d) ++DW_AT (DW_AT_call_value, 0x7e) ++DW_AT (DW_AT_call_origin, 0x7f) ++DW_AT (DW_AT_call_parameter, 0x80) ++DW_AT (DW_AT_call_pc, 0x81) ++DW_AT (DW_AT_call_tail_call, 0x82) ++DW_AT (DW_AT_call_target, 0x83) ++DW_AT (DW_AT_call_target_clobbered, 0x84) ++DW_AT (DW_AT_call_data_location, 0x85) ++DW_AT (DW_AT_call_data_value, 0x86) ++DW_AT (DW_AT_noreturn, 0x87) ++DW_AT (DW_AT_alignment, 0x88) ++DW_AT (DW_AT_export_symbols, 0x89) ++DW_AT (DW_AT_deleted, 0x8a) ++DW_AT (DW_AT_defaulted, 0x8b) ++DW_AT (DW_AT_loclists_base, 0x8c) ++ ++DW_AT_DUP (DW_AT_lo_user, 0x2000) /* Implementation-defined range start. */ ++DW_AT_DUP (DW_AT_hi_user, 0x3fff) /* Implementation-defined range end. */ ++ ++/* SGI/MIPS extensions. */ ++DW_AT (DW_AT_MIPS_fde, 0x2001) ++DW_AT (DW_AT_MIPS_loop_begin, 0x2002) ++DW_AT (DW_AT_MIPS_tail_loop_begin, 0x2003) ++DW_AT (DW_AT_MIPS_epilog_begin, 0x2004) ++DW_AT (DW_AT_MIPS_loop_unroll_factor, 0x2005) ++DW_AT (DW_AT_MIPS_software_pipeline_depth, 0x2006) ++DW_AT (DW_AT_MIPS_linkage_name, 0x2007) ++DW_AT (DW_AT_MIPS_stride, 0x2008) ++DW_AT (DW_AT_MIPS_abstract_name, 0x2009) ++DW_AT (DW_AT_MIPS_clone_origin, 0x200a) ++DW_AT (DW_AT_MIPS_has_inlines, 0x200b) ++/* HP extensions. */ ++DW_AT (DW_AT_HP_block_index, 0x2000) ++DW_AT_DUP (DW_AT_HP_unmodifiable, 0x2001) /* Same as DW_AT_MIPS_fde. */ ++DW_AT_DUP (DW_AT_HP_prologue, 0x2005) /* Same as DW_AT_MIPS_loop_unroll. */ ++DW_AT_DUP (DW_AT_HP_epilogue, 0x2008) /* Same as DW_AT_MIPS_stride. 
*/ ++DW_AT (DW_AT_HP_actuals_stmt_list, 0x2010) ++DW_AT (DW_AT_HP_proc_per_section, 0x2011) ++DW_AT (DW_AT_HP_raw_data_ptr, 0x2012) ++DW_AT (DW_AT_HP_pass_by_reference, 0x2013) ++DW_AT (DW_AT_HP_opt_level, 0x2014) ++DW_AT (DW_AT_HP_prof_version_id, 0x2015) ++DW_AT (DW_AT_HP_opt_flags, 0x2016) ++DW_AT (DW_AT_HP_cold_region_low_pc, 0x2017) ++DW_AT (DW_AT_HP_cold_region_high_pc, 0x2018) ++DW_AT (DW_AT_HP_all_variables_modifiable, 0x2019) ++DW_AT (DW_AT_HP_linkage_name, 0x201a) ++DW_AT (DW_AT_HP_prof_flags, 0x201b) /* In comp unit of procs_info for -g. */ ++DW_AT (DW_AT_HP_unit_name, 0x201f) ++DW_AT (DW_AT_HP_unit_size, 0x2020) ++DW_AT (DW_AT_HP_widened_byte_size, 0x2021) ++DW_AT (DW_AT_HP_definition_points, 0x2022) ++DW_AT (DW_AT_HP_default_location, 0x2023) ++DW_AT (DW_AT_HP_is_result_param, 0x2029) ++ ++/* GNU extensions. */ ++DW_AT (DW_AT_sf_names, 0x2101) ++DW_AT (DW_AT_src_info, 0x2102) ++DW_AT (DW_AT_mac_info, 0x2103) ++DW_AT (DW_AT_src_coords, 0x2104) ++DW_AT (DW_AT_body_begin, 0x2105) ++DW_AT (DW_AT_body_end, 0x2106) ++DW_AT (DW_AT_GNU_vector, 0x2107) ++/* Thread-safety annotations. ++ See http://gcc.gnu.org/wiki/ThreadSafetyAnnotation . */ ++DW_AT (DW_AT_GNU_guarded_by, 0x2108) ++DW_AT (DW_AT_GNU_pt_guarded_by, 0x2109) ++DW_AT (DW_AT_GNU_guarded, 0x210a) ++DW_AT (DW_AT_GNU_pt_guarded, 0x210b) ++DW_AT (DW_AT_GNU_locks_excluded, 0x210c) ++DW_AT (DW_AT_GNU_exclusive_locks_required, 0x210d) ++DW_AT (DW_AT_GNU_shared_locks_required, 0x210e) ++/* One-definition rule violation detection. ++ See http://gcc.gnu.org/wiki/DwarfSeparateTypeInfo . */ ++DW_AT (DW_AT_GNU_odr_signature, 0x210f) ++/* Template template argument name. ++ See http://gcc.gnu.org/wiki/TemplateParmsDwarf . */ ++DW_AT (DW_AT_GNU_template_name, 0x2110) ++/* The GNU call site extension. ++ See http://www.dwarfstd.org/ShowIssue.php?issue=100909.2&type=open . 
*/ ++DW_AT (DW_AT_GNU_call_site_value, 0x2111) ++DW_AT (DW_AT_GNU_call_site_data_value, 0x2112) ++DW_AT (DW_AT_GNU_call_site_target, 0x2113) ++DW_AT (DW_AT_GNU_call_site_target_clobbered, 0x2114) ++DW_AT (DW_AT_GNU_tail_call, 0x2115) ++DW_AT (DW_AT_GNU_all_tail_call_sites, 0x2116) ++DW_AT (DW_AT_GNU_all_call_sites, 0x2117) ++DW_AT (DW_AT_GNU_all_source_call_sites, 0x2118) ++/* Section offset into .debug_macro section. */ ++DW_AT (DW_AT_GNU_macros, 0x2119) ++/* Attribute for C++ deleted special member functions (= delete;). */ ++DW_AT (DW_AT_GNU_deleted, 0x211a) ++/* Extensions for Fission. See http://gcc.gnu.org/wiki/DebugFission. */ ++DW_AT (DW_AT_GNU_dwo_name, 0x2130) ++DW_AT (DW_AT_GNU_dwo_id, 0x2131) ++DW_AT (DW_AT_GNU_ranges_base, 0x2132) ++DW_AT (DW_AT_GNU_addr_base, 0x2133) ++DW_AT (DW_AT_GNU_pubnames, 0x2134) ++DW_AT (DW_AT_GNU_pubtypes, 0x2135) ++/* Attribute for discriminator. ++ See http://gcc.gnu.org/wiki/Discriminator */ ++DW_AT (DW_AT_GNU_discriminator, 0x2136) ++/* VMS extensions. */ ++DW_AT (DW_AT_VMS_rtnbeg_pd_address, 0x2201) ++/* GNAT extensions. */ ++/* GNAT descriptive type. ++ See http://gcc.gnu.org/wiki/DW_AT_GNAT_descriptive_type . */ ++DW_AT (DW_AT_use_GNAT_descriptive_type, 0x2301) ++DW_AT (DW_AT_GNAT_descriptive_type, 0x2302) ++/* Rational constant extension. ++ See https://gcc.gnu.org/wiki/DW_AT_GNU_numerator_denominator . */ ++DW_TAG (DW_AT_GNU_numerator, 0x2303) ++DW_TAG (DW_AT_GNU_denominator, 0x2304) ++/* Biased integer extension. ++ See https://gcc.gnu.org/wiki/DW_AT_GNU_bias . */ ++DW_TAG (DW_AT_GNU_bias, 0x2305) ++/* UPC extension. */ ++DW_AT (DW_AT_upc_threads_scaled, 0x3210) ++/* PGI (STMicroelectronics) extensions. */ ++DW_AT (DW_AT_PGI_lbase, 0x3a00) ++DW_AT (DW_AT_PGI_soffset, 0x3a01) ++DW_AT (DW_AT_PGI_lstride, 0x3a02) ++/* Apple extensions. 
*/ ++DW_AT (DW_AT_APPLE_optimized, 0x3fe1) ++DW_AT (DW_AT_APPLE_flags, 0x3fe2) ++DW_AT (DW_AT_APPLE_isa, 0x3fe3) ++DW_AT (DW_AT_APPLE_block, 0x3fe4) ++DW_AT (DW_AT_APPLE_major_runtime_vers, 0x3fe5) ++DW_AT (DW_AT_APPLE_runtime_class, 0x3fe6) ++DW_AT (DW_AT_APPLE_omit_frame_ptr, 0x3fe7) ++DW_AT (DW_AT_APPLE_property_name, 0x3fe8) ++DW_AT (DW_AT_APPLE_property_getter, 0x3fe9) ++DW_AT (DW_AT_APPLE_property_setter, 0x3fea) ++DW_AT (DW_AT_APPLE_property_attribute, 0x3feb) ++DW_AT (DW_AT_APPLE_objc_complete_type, 0x3fec) ++DW_AT (DW_AT_APPLE_property, 0x3fed) ++DW_END_AT ++ ++DW_FIRST_OP (DW_OP_addr, 0x03) ++DW_OP (DW_OP_deref, 0x06) ++DW_OP (DW_OP_const1u, 0x08) ++DW_OP (DW_OP_const1s, 0x09) ++DW_OP (DW_OP_const2u, 0x0a) ++DW_OP (DW_OP_const2s, 0x0b) ++DW_OP (DW_OP_const4u, 0x0c) ++DW_OP (DW_OP_const4s, 0x0d) ++DW_OP (DW_OP_const8u, 0x0e) ++DW_OP (DW_OP_const8s, 0x0f) ++DW_OP (DW_OP_constu, 0x10) ++DW_OP (DW_OP_consts, 0x11) ++DW_OP (DW_OP_dup, 0x12) ++DW_OP (DW_OP_drop, 0x13) ++DW_OP (DW_OP_over, 0x14) ++DW_OP (DW_OP_pick, 0x15) ++DW_OP (DW_OP_swap, 0x16) ++DW_OP (DW_OP_rot, 0x17) ++DW_OP (DW_OP_xderef, 0x18) ++DW_OP (DW_OP_abs, 0x19) ++DW_OP (DW_OP_and, 0x1a) ++DW_OP (DW_OP_div, 0x1b) ++DW_OP (DW_OP_minus, 0x1c) ++DW_OP (DW_OP_mod, 0x1d) ++DW_OP (DW_OP_mul, 0x1e) ++DW_OP (DW_OP_neg, 0x1f) ++DW_OP (DW_OP_not, 0x20) ++DW_OP (DW_OP_or, 0x21) ++DW_OP (DW_OP_plus, 0x22) ++DW_OP (DW_OP_plus_uconst, 0x23) ++DW_OP (DW_OP_shl, 0x24) ++DW_OP (DW_OP_shr, 0x25) ++DW_OP (DW_OP_shra, 0x26) ++DW_OP (DW_OP_xor, 0x27) ++DW_OP (DW_OP_bra, 0x28) ++DW_OP (DW_OP_eq, 0x29) ++DW_OP (DW_OP_ge, 0x2a) ++DW_OP (DW_OP_gt, 0x2b) ++DW_OP (DW_OP_le, 0x2c) ++DW_OP (DW_OP_lt, 0x2d) ++DW_OP (DW_OP_ne, 0x2e) ++DW_OP (DW_OP_skip, 0x2f) ++DW_OP (DW_OP_lit0, 0x30) ++DW_OP (DW_OP_lit1, 0x31) ++DW_OP (DW_OP_lit2, 0x32) ++DW_OP (DW_OP_lit3, 0x33) ++DW_OP (DW_OP_lit4, 0x34) ++DW_OP (DW_OP_lit5, 0x35) ++DW_OP (DW_OP_lit6, 0x36) ++DW_OP (DW_OP_lit7, 0x37) ++DW_OP (DW_OP_lit8, 0x38) ++DW_OP (DW_OP_lit9, 0x39) 
++DW_OP (DW_OP_lit10, 0x3a) ++DW_OP (DW_OP_lit11, 0x3b) ++DW_OP (DW_OP_lit12, 0x3c) ++DW_OP (DW_OP_lit13, 0x3d) ++DW_OP (DW_OP_lit14, 0x3e) ++DW_OP (DW_OP_lit15, 0x3f) ++DW_OP (DW_OP_lit16, 0x40) ++DW_OP (DW_OP_lit17, 0x41) ++DW_OP (DW_OP_lit18, 0x42) ++DW_OP (DW_OP_lit19, 0x43) ++DW_OP (DW_OP_lit20, 0x44) ++DW_OP (DW_OP_lit21, 0x45) ++DW_OP (DW_OP_lit22, 0x46) ++DW_OP (DW_OP_lit23, 0x47) ++DW_OP (DW_OP_lit24, 0x48) ++DW_OP (DW_OP_lit25, 0x49) ++DW_OP (DW_OP_lit26, 0x4a) ++DW_OP (DW_OP_lit27, 0x4b) ++DW_OP (DW_OP_lit28, 0x4c) ++DW_OP (DW_OP_lit29, 0x4d) ++DW_OP (DW_OP_lit30, 0x4e) ++DW_OP (DW_OP_lit31, 0x4f) ++DW_OP (DW_OP_reg0, 0x50) ++DW_OP (DW_OP_reg1, 0x51) ++DW_OP (DW_OP_reg2, 0x52) ++DW_OP (DW_OP_reg3, 0x53) ++DW_OP (DW_OP_reg4, 0x54) ++DW_OP (DW_OP_reg5, 0x55) ++DW_OP (DW_OP_reg6, 0x56) ++DW_OP (DW_OP_reg7, 0x57) ++DW_OP (DW_OP_reg8, 0x58) ++DW_OP (DW_OP_reg9, 0x59) ++DW_OP (DW_OP_reg10, 0x5a) ++DW_OP (DW_OP_reg11, 0x5b) ++DW_OP (DW_OP_reg12, 0x5c) ++DW_OP (DW_OP_reg13, 0x5d) ++DW_OP (DW_OP_reg14, 0x5e) ++DW_OP (DW_OP_reg15, 0x5f) ++DW_OP (DW_OP_reg16, 0x60) ++DW_OP (DW_OP_reg17, 0x61) ++DW_OP (DW_OP_reg18, 0x62) ++DW_OP (DW_OP_reg19, 0x63) ++DW_OP (DW_OP_reg20, 0x64) ++DW_OP (DW_OP_reg21, 0x65) ++DW_OP (DW_OP_reg22, 0x66) ++DW_OP (DW_OP_reg23, 0x67) ++DW_OP (DW_OP_reg24, 0x68) ++DW_OP (DW_OP_reg25, 0x69) ++DW_OP (DW_OP_reg26, 0x6a) ++DW_OP (DW_OP_reg27, 0x6b) ++DW_OP (DW_OP_reg28, 0x6c) ++DW_OP (DW_OP_reg29, 0x6d) ++DW_OP (DW_OP_reg30, 0x6e) ++DW_OP (DW_OP_reg31, 0x6f) ++DW_OP (DW_OP_breg0, 0x70) ++DW_OP (DW_OP_breg1, 0x71) ++DW_OP (DW_OP_breg2, 0x72) ++DW_OP (DW_OP_breg3, 0x73) ++DW_OP (DW_OP_breg4, 0x74) ++DW_OP (DW_OP_breg5, 0x75) ++DW_OP (DW_OP_breg6, 0x76) ++DW_OP (DW_OP_breg7, 0x77) ++DW_OP (DW_OP_breg8, 0x78) ++DW_OP (DW_OP_breg9, 0x79) ++DW_OP (DW_OP_breg10, 0x7a) ++DW_OP (DW_OP_breg11, 0x7b) ++DW_OP (DW_OP_breg12, 0x7c) ++DW_OP (DW_OP_breg13, 0x7d) ++DW_OP (DW_OP_breg14, 0x7e) ++DW_OP (DW_OP_breg15, 0x7f) ++DW_OP (DW_OP_breg16, 0x80) ++DW_OP 
(DW_OP_breg17, 0x81) ++DW_OP (DW_OP_breg18, 0x82) ++DW_OP (DW_OP_breg19, 0x83) ++DW_OP (DW_OP_breg20, 0x84) ++DW_OP (DW_OP_breg21, 0x85) ++DW_OP (DW_OP_breg22, 0x86) ++DW_OP (DW_OP_breg23, 0x87) ++DW_OP (DW_OP_breg24, 0x88) ++DW_OP (DW_OP_breg25, 0x89) ++DW_OP (DW_OP_breg26, 0x8a) ++DW_OP (DW_OP_breg27, 0x8b) ++DW_OP (DW_OP_breg28, 0x8c) ++DW_OP (DW_OP_breg29, 0x8d) ++DW_OP (DW_OP_breg30, 0x8e) ++DW_OP (DW_OP_breg31, 0x8f) ++DW_OP (DW_OP_regx, 0x90) ++DW_OP (DW_OP_fbreg, 0x91) ++DW_OP (DW_OP_bregx, 0x92) ++DW_OP (DW_OP_piece, 0x93) ++DW_OP (DW_OP_deref_size, 0x94) ++DW_OP (DW_OP_xderef_size, 0x95) ++DW_OP (DW_OP_nop, 0x96) ++/* DWARF 3 extensions. */ ++DW_OP (DW_OP_push_object_address, 0x97) ++DW_OP (DW_OP_call2, 0x98) ++DW_OP (DW_OP_call4, 0x99) ++DW_OP (DW_OP_call_ref, 0x9a) ++DW_OP (DW_OP_form_tls_address, 0x9b) ++DW_OP (DW_OP_call_frame_cfa, 0x9c) ++DW_OP (DW_OP_bit_piece, 0x9d) ++ ++/* DWARF 4 extensions. */ ++DW_OP (DW_OP_implicit_value, 0x9e) ++DW_OP (DW_OP_stack_value, 0x9f) ++ ++/* DWARF 5 extensions. */ ++DW_OP (DW_OP_implicit_pointer, 0xa0) ++DW_OP (DW_OP_addrx, 0xa1) ++DW_OP (DW_OP_constx, 0xa2) ++DW_OP (DW_OP_entry_value, 0xa3) ++DW_OP (DW_OP_const_type, 0xa4) ++DW_OP (DW_OP_regval_type, 0xa5) ++DW_OP (DW_OP_deref_type, 0xa6) ++DW_OP (DW_OP_xderef_type, 0xa7) ++DW_OP (DW_OP_convert, 0xa8) ++DW_OP (DW_OP_reinterpret, 0xa9) ++ ++DW_OP_DUP (DW_OP_lo_user, 0xe0) /* Implementation-defined range start. */ ++DW_OP_DUP (DW_OP_hi_user, 0xff) /* Implementation-defined range end. */ ++ ++/* GNU extensions. */ ++DW_OP (DW_OP_GNU_push_tls_address, 0xe0) ++/* The following is for marking variables that are uninitialized. */ ++DW_OP (DW_OP_GNU_uninit, 0xf0) ++DW_OP (DW_OP_GNU_encoded_addr, 0xf1) ++/* The GNU implicit pointer extension. ++ See http://www.dwarfstd.org/ShowIssue.php?issue=100831.1&type=open . */ ++DW_OP (DW_OP_GNU_implicit_pointer, 0xf2) ++/* The GNU entry value extension. ++ See http://www.dwarfstd.org/ShowIssue.php?issue=100909.1&type=open . 
*/ ++DW_OP (DW_OP_GNU_entry_value, 0xf3) ++/* The GNU typed stack extension. ++ See http://www.dwarfstd.org/doc/040408.1.html . */ ++DW_OP (DW_OP_GNU_const_type, 0xf4) ++DW_OP (DW_OP_GNU_regval_type, 0xf5) ++DW_OP (DW_OP_GNU_deref_type, 0xf6) ++DW_OP (DW_OP_GNU_convert, 0xf7) ++DW_OP (DW_OP_GNU_reinterpret, 0xf9) ++/* The GNU parameter ref extension. */ ++DW_OP (DW_OP_GNU_parameter_ref, 0xfa) ++/* Extensions for Fission. See http://gcc.gnu.org/wiki/DebugFission. */ ++DW_OP (DW_OP_GNU_addr_index, 0xfb) ++DW_OP (DW_OP_GNU_const_index, 0xfc) ++/* HP extensions. */ ++DW_OP_DUP (DW_OP_HP_unknown, 0xe0) /* Ouch, the same as GNU_push_tls_address. */ ++DW_OP (DW_OP_HP_is_value, 0xe1) ++DW_OP (DW_OP_HP_fltconst4, 0xe2) ++DW_OP (DW_OP_HP_fltconst8, 0xe3) ++DW_OP (DW_OP_HP_mod_range, 0xe4) ++DW_OP (DW_OP_HP_unmod_range, 0xe5) ++DW_OP (DW_OP_HP_tls, 0xe6) ++/* PGI (STMicroelectronics) extensions. */ ++DW_OP (DW_OP_PGI_omp_thread_num, 0xf8) ++DW_END_OP ++ ++DW_FIRST_ATE (DW_ATE_void, 0x0) ++DW_ATE (DW_ATE_address, 0x1) ++DW_ATE (DW_ATE_boolean, 0x2) ++DW_ATE (DW_ATE_complex_float, 0x3) ++DW_ATE (DW_ATE_float, 0x4) ++DW_ATE (DW_ATE_signed, 0x5) ++DW_ATE (DW_ATE_signed_char, 0x6) ++DW_ATE (DW_ATE_unsigned, 0x7) ++DW_ATE (DW_ATE_unsigned_char, 0x8) ++/* DWARF 3. */ ++DW_ATE (DW_ATE_imaginary_float, 0x9) ++DW_ATE (DW_ATE_packed_decimal, 0xa) ++DW_ATE (DW_ATE_numeric_string, 0xb) ++DW_ATE (DW_ATE_edited, 0xc) ++DW_ATE (DW_ATE_signed_fixed, 0xd) ++DW_ATE (DW_ATE_unsigned_fixed, 0xe) ++DW_ATE (DW_ATE_decimal_float, 0xf) ++/* DWARF 4. */ ++DW_ATE (DW_ATE_UTF, 0x10) ++/* DWARF 5. */ ++DW_ATE (DW_ATE_UCS, 0x11) ++DW_ATE (DW_ATE_ASCII, 0x12) ++ ++DW_ATE_DUP (DW_ATE_lo_user, 0x80) ++DW_ATE_DUP (DW_ATE_hi_user, 0xff) ++ ++/* HP extensions. */ ++DW_ATE (DW_ATE_HP_float80, 0x80) /* Floating-point (80 bit). */ ++DW_ATE (DW_ATE_HP_complex_float80, 0x81) /* Complex floating-point (80 bit). */ ++DW_ATE (DW_ATE_HP_float128, 0x82) /* Floating-point (128 bit). 
*/ ++DW_ATE (DW_ATE_HP_complex_float128, 0x83) /* Complex fp (128 bit). */ ++DW_ATE (DW_ATE_HP_floathpintel, 0x84) /* Floating-point (82 bit IA64). */ ++DW_ATE (DW_ATE_HP_imaginary_float80, 0x85) ++DW_ATE (DW_ATE_HP_imaginary_float128, 0x86) ++DW_ATE (DW_ATE_HP_VAX_float, 0x88) /* F or G floating. */ ++DW_ATE (DW_ATE_HP_VAX_float_d, 0x89) /* D floating. */ ++DW_ATE (DW_ATE_HP_packed_decimal, 0x8a) /* Cobol. */ ++DW_ATE (DW_ATE_HP_zoned_decimal, 0x8b) /* Cobol. */ ++DW_ATE (DW_ATE_HP_edited, 0x8c) /* Cobol. */ ++DW_ATE (DW_ATE_HP_signed_fixed, 0x8d) /* Cobol. */ ++DW_ATE (DW_ATE_HP_unsigned_fixed, 0x8e) /* Cobol. */ ++DW_ATE (DW_ATE_HP_VAX_complex_float, 0x8f) /* F or G floating complex. */ ++DW_ATE (DW_ATE_HP_VAX_complex_float_d, 0x90) /* D floating complex. */ ++ ++DW_END_ATE ++ ++DW_FIRST_CFA (DW_CFA_advance_loc, 0x40) ++DW_CFA (DW_CFA_offset, 0x80) ++DW_CFA (DW_CFA_restore, 0xc0) ++DW_CFA (DW_CFA_nop, 0x00) ++DW_CFA (DW_CFA_set_loc, 0x01) ++DW_CFA (DW_CFA_advance_loc1, 0x02) ++DW_CFA (DW_CFA_advance_loc2, 0x03) ++DW_CFA (DW_CFA_advance_loc4, 0x04) ++DW_CFA (DW_CFA_offset_extended, 0x05) ++DW_CFA (DW_CFA_restore_extended, 0x06) ++DW_CFA (DW_CFA_undefined, 0x07) ++DW_CFA (DW_CFA_same_value, 0x08) ++DW_CFA (DW_CFA_register, 0x09) ++DW_CFA (DW_CFA_remember_state, 0x0a) ++DW_CFA (DW_CFA_restore_state, 0x0b) ++DW_CFA (DW_CFA_def_cfa, 0x0c) ++DW_CFA (DW_CFA_def_cfa_register, 0x0d) ++DW_CFA (DW_CFA_def_cfa_offset, 0x0e) ++/* DWARF 3. */ ++DW_CFA (DW_CFA_def_cfa_expression, 0x0f) ++DW_CFA (DW_CFA_expression, 0x10) ++DW_CFA (DW_CFA_offset_extended_sf, 0x11) ++DW_CFA (DW_CFA_def_cfa_sf, 0x12) ++DW_CFA (DW_CFA_def_cfa_offset_sf, 0x13) ++DW_CFA (DW_CFA_val_offset, 0x14) ++DW_CFA (DW_CFA_val_offset_sf, 0x15) ++DW_CFA (DW_CFA_val_expression, 0x16) ++ ++DW_CFA (DW_CFA_lo_user, 0x1c) ++DW_CFA (DW_CFA_hi_user, 0x3f) ++ ++/* SGI/MIPS specific. */ ++DW_CFA (DW_CFA_MIPS_advance_loc8, 0x1d) ++/* GNU extensions. 
*/ ++DW_CFA (DW_CFA_GNU_window_save, 0x2d) ++DW_CFA (DW_CFA_GNU_args_size, 0x2e) ++DW_CFA (DW_CFA_GNU_negative_offset_extended, 0x2f) ++ ++DW_END_CFA diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/dwarf2.h index 000000000,000000000..ca8ff3b6d new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/dwarf2.h @@@ -1,0 -1,0 +1,531 @@@ ++/* Declarations and definitions of codes relating to the DWARF2 and ++ DWARF3 symbolic debugging information formats. ++ Copyright (C) 1992-2016 Free Software Foundation, Inc. ++ ++ Written by Gary Funck (gary@intrepid.com) The Ada Joint Program ++ Office (AJPO), Florida State University and Silicon Graphics Inc. ++ provided support for this effort -- June 21, 1995. ++ ++ Derived from the DWARF 1 implementation written by Ron Guilmette ++ (rfg@netcom.com), November 1990. ++ ++ This file is part of GCC. ++ ++ GCC is free software; you can redistribute it and/or modify it under ++ the terms of the GNU General Public License as published by the Free ++ Software Foundation; either version 3, or (at your option) any later ++ version. ++ ++ GCC is distributed in the hope that it will be useful, but WITHOUT ++ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY ++ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public ++ License for more details. ++ ++ Under Section 7 of GPL version 3, you are granted additional ++ permissions described in the GCC Runtime Library Exception, version ++ 3.1, as published by the Free Software Foundation. ++ ++ You should have received a copy of the GNU General Public License and ++ a copy of the GCC Runtime Library Exception along with this program; ++ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see ++ . 
*/ ++ ++/* This file is derived from the DWARF specification (a public document) ++ Revision 2.0.0 (July 27, 1993) developed by the UNIX International ++ Programming Languages Special Interest Group (UI/PLSIG) and distributed ++ by UNIX International. Copies of this specification are available from ++ UNIX International, 20 Waterview Boulevard, Parsippany, NJ, 07054. ++ ++ This file also now contains definitions from the DWARF 3 specification ++ published Dec 20, 2005, available from: http://dwarf.freestandards.org. */ ++ ++#ifndef _DWARF2_H ++#define _DWARF2_H ++ ++#define DW_TAG(name, value) , name = value ++#define DW_TAG_DUP(name, value) , name = value ++#define DW_FORM(name, value) , name = value ++#define DW_AT(name, value) , name = value ++#define DW_AT_DUP(name, value) , name = value ++#define DW_OP(name, value) , name = value ++#define DW_OP_DUP(name, value) , name = value ++#define DW_ATE(name, value) , name = value ++#define DW_ATE_DUP(name, value) , name = value ++#define DW_CFA(name, value) , name = value ++ ++#define DW_FIRST_TAG(name, value) enum dwarf_tag { \ ++ name = value ++#define DW_END_TAG }; ++#define DW_FIRST_FORM(name, value) enum dwarf_form { \ ++ name = value ++#define DW_END_FORM }; ++#define DW_FIRST_AT(name, value) enum dwarf_attribute { \ ++ name = value ++#define DW_END_AT }; ++#define DW_FIRST_OP(name, value) enum dwarf_location_atom { \ ++ name = value ++#define DW_END_OP }; ++#define DW_FIRST_ATE(name, value) enum dwarf_type { \ ++ name = value ++#define DW_END_ATE }; ++#define DW_FIRST_CFA(name, value) enum dwarf_call_frame_info { \ ++ name = value ++#define DW_END_CFA }; ++ ++#include "dwarf2.def" ++ ++#undef DW_FIRST_TAG ++#undef DW_END_TAG ++#undef DW_FIRST_FORM ++#undef DW_END_FORM ++#undef DW_FIRST_AT ++#undef DW_END_AT ++#undef DW_FIRST_OP ++#undef DW_END_OP ++#undef DW_FIRST_ATE ++#undef DW_END_ATE ++#undef DW_FIRST_CFA ++#undef DW_END_CFA ++ ++#undef DW_TAG ++#undef DW_TAG_DUP ++#undef DW_FORM ++#undef DW_AT ++#undef 
DW_AT_DUP ++#undef DW_OP ++#undef DW_OP_DUP ++#undef DW_ATE ++#undef DW_ATE_DUP ++#undef DW_CFA ++ ++/* Flag that tells whether entry has a child or not. */ ++#define DW_children_no 0 ++#define DW_children_yes 1 ++ ++#define DW_AT_stride_size DW_AT_bit_stride /* Note: The use of DW_AT_stride_size is deprecated. */ ++#define DW_AT_stride DW_AT_byte_stride /* Note: The use of DW_AT_stride is deprecated. */ ++ ++/* Decimal sign encodings. */ ++enum dwarf_decimal_sign_encoding ++ { ++ /* DWARF 3. */ ++ DW_DS_unsigned = 0x01, ++ DW_DS_leading_overpunch = 0x02, ++ DW_DS_trailing_overpunch = 0x03, ++ DW_DS_leading_separate = 0x04, ++ DW_DS_trailing_separate = 0x05 ++ }; ++ ++/* Endianity encodings. */ ++enum dwarf_endianity_encoding ++ { ++ /* DWARF 3. */ ++ DW_END_default = 0x00, ++ DW_END_big = 0x01, ++ DW_END_little = 0x02, ++ ++ DW_END_lo_user = 0x40, ++ DW_END_hi_user = 0xff ++ }; ++ ++/* Array ordering names and codes. */ ++enum dwarf_array_dim_ordering ++ { ++ DW_ORD_row_major = 0, ++ DW_ORD_col_major = 1 ++ }; ++ ++/* Access attribute. */ ++enum dwarf_access_attribute ++ { ++ DW_ACCESS_public = 1, ++ DW_ACCESS_protected = 2, ++ DW_ACCESS_private = 3 ++ }; ++ ++/* Visibility. */ ++enum dwarf_visibility_attribute ++ { ++ DW_VIS_local = 1, ++ DW_VIS_exported = 2, ++ DW_VIS_qualified = 3 ++ }; ++ ++/* Virtuality. */ ++enum dwarf_virtuality_attribute ++ { ++ DW_VIRTUALITY_none = 0, ++ DW_VIRTUALITY_virtual = 1, ++ DW_VIRTUALITY_pure_virtual = 2 ++ }; ++ ++/* Case sensitivity. */ ++enum dwarf_id_case ++ { ++ DW_ID_case_sensitive = 0, ++ DW_ID_up_case = 1, ++ DW_ID_down_case = 2, ++ DW_ID_case_insensitive = 3 ++ }; ++ ++/* Calling convention. */ ++enum dwarf_calling_convention ++ { ++ DW_CC_normal = 0x1, ++ DW_CC_program = 0x2, ++ DW_CC_nocall = 0x3, ++ ++ /* DWARF 5. 
*/ ++ DW_CC_pass_by_reference = 0x4, ++ DW_CC_pass_by_value = 0x5, ++ ++ DW_CC_lo_user = 0x40, ++ DW_CC_hi_user = 0xff, ++ ++ DW_CC_GNU_renesas_sh = 0x40, ++ DW_CC_GNU_borland_fastcall_i386 = 0x41, ++ ++ /* This DW_CC_ value is not currently generated by any toolchain. It is ++ used internally to GDB to indicate OpenCL C functions that have been ++ compiled with the IBM XL C for OpenCL compiler and use a non-platform ++ calling convention for passing OpenCL C vector types. This value may ++ be changed freely as long as it does not conflict with any other DW_CC_ ++ value defined here. */ ++ DW_CC_GDB_IBM_OpenCL = 0xff ++ }; ++ ++/* Inline attribute. */ ++enum dwarf_inline_attribute ++ { ++ DW_INL_not_inlined = 0, ++ DW_INL_inlined = 1, ++ DW_INL_declared_not_inlined = 2, ++ DW_INL_declared_inlined = 3 ++ }; ++ ++/* Discriminant lists. */ ++enum dwarf_discrim_list ++ { ++ DW_DSC_label = 0, ++ DW_DSC_range = 1 ++ }; ++ ++/* Line number opcodes. */ ++enum dwarf_line_number_ops ++ { ++ DW_LNS_extended_op = 0, ++ DW_LNS_copy = 1, ++ DW_LNS_advance_pc = 2, ++ DW_LNS_advance_line = 3, ++ DW_LNS_set_file = 4, ++ DW_LNS_set_column = 5, ++ DW_LNS_negate_stmt = 6, ++ DW_LNS_set_basic_block = 7, ++ DW_LNS_const_add_pc = 8, ++ DW_LNS_fixed_advance_pc = 9, ++ /* DWARF 3. */ ++ DW_LNS_set_prologue_end = 10, ++ DW_LNS_set_epilogue_begin = 11, ++ DW_LNS_set_isa = 12 ++ }; ++ ++/* Line number extended opcodes. */ ++enum dwarf_line_number_x_ops ++ { ++ DW_LNE_end_sequence = 1, ++ DW_LNE_set_address = 2, ++ DW_LNE_define_file = 3, ++ DW_LNE_set_discriminator = 4, ++ /* HP extensions. 
*/ ++ DW_LNE_HP_negate_is_UV_update = 0x11, ++ DW_LNE_HP_push_context = 0x12, ++ DW_LNE_HP_pop_context = 0x13, ++ DW_LNE_HP_set_file_line_column = 0x14, ++ DW_LNE_HP_set_routine_name = 0x15, ++ DW_LNE_HP_set_sequence = 0x16, ++ DW_LNE_HP_negate_post_semantics = 0x17, ++ DW_LNE_HP_negate_function_exit = 0x18, ++ DW_LNE_HP_negate_front_end_logical = 0x19, ++ DW_LNE_HP_define_proc = 0x20, ++ DW_LNE_HP_source_file_correlation = 0x80, ++ ++ DW_LNE_lo_user = 0x80, ++ DW_LNE_hi_user = 0xff ++ }; ++ ++/* Sub-opcodes for DW_LNE_HP_source_file_correlation. */ ++enum dwarf_line_number_hp_sfc_ops ++ { ++ DW_LNE_HP_SFC_formfeed = 1, ++ DW_LNE_HP_SFC_set_listing_line = 2, ++ DW_LNE_HP_SFC_associate = 3 ++ }; ++ ++/* Content type codes in line table directory_entry_format ++ and file_name_entry_format sequences. */ ++enum dwarf_line_number_content_type ++ { ++ DW_LNCT_path = 0x1, ++ DW_LNCT_directory_index = 0x2, ++ DW_LNCT_timestamp = 0x3, ++ DW_LNCT_size = 0x4, ++ DW_LNCT_MD5 = 0x5, ++ DW_LNCT_lo_user = 0x2000, ++ DW_LNCT_hi_user = 0x3fff ++ }; ++ ++/* Type codes for location list entries. */ ++enum dwarf_location_list_entry_type ++ { ++ DW_LLE_end_of_list = 0x00, ++ DW_LLE_base_addressx = 0x01, ++ DW_LLE_startx_endx = 0x02, ++ DW_LLE_startx_length = 0x03, ++ DW_LLE_offset_pair = 0x04, ++ DW_LLE_default_location = 0x05, ++ DW_LLE_base_address = 0x06, ++ DW_LLE_start_end = 0x07, ++ DW_LLE_start_length = 0x08, ++ ++ /* Former extension for Fission. ++ See http://gcc.gnu.org/wiki/DebugFission. */ ++ DW_LLE_GNU_end_of_list_entry = 0x00, ++ DW_LLE_GNU_base_address_selection_entry = 0x01, ++ DW_LLE_GNU_start_end_entry = 0x02, ++ DW_LLE_GNU_start_length_entry = 0x03 ++ }; ++ ++#define DW_CIE_ID 0xffffffff ++#define DW64_CIE_ID 0xffffffffffffffffULL ++#define DW_CIE_VERSION 1 ++ ++#define DW_CFA_extended 0 ++ ++#define DW_CHILDREN_no 0x00 ++#define DW_CHILDREN_yes 0x01 ++ ++#define DW_ADDR_none 0 ++ ++/* Source language names and codes. 
*/ ++enum dwarf_source_language ++ { ++ DW_LANG_C89 = 0x0001, ++ DW_LANG_C = 0x0002, ++ DW_LANG_Ada83 = 0x0003, ++ DW_LANG_C_plus_plus = 0x0004, ++ DW_LANG_Cobol74 = 0x0005, ++ DW_LANG_Cobol85 = 0x0006, ++ DW_LANG_Fortran77 = 0x0007, ++ DW_LANG_Fortran90 = 0x0008, ++ DW_LANG_Pascal83 = 0x0009, ++ DW_LANG_Modula2 = 0x000a, ++ /* DWARF 3. */ ++ DW_LANG_Java = 0x000b, ++ DW_LANG_C99 = 0x000c, ++ DW_LANG_Ada95 = 0x000d, ++ DW_LANG_Fortran95 = 0x000e, ++ DW_LANG_PLI = 0x000f, ++ DW_LANG_ObjC = 0x0010, ++ DW_LANG_ObjC_plus_plus = 0x0011, ++ DW_LANG_UPC = 0x0012, ++ DW_LANG_D = 0x0013, ++ /* DWARF 4. */ ++ DW_LANG_Python = 0x0014, ++ /* DWARF 5. */ ++ DW_LANG_OpenCL = 0x0015, ++ DW_LANG_Go = 0x0016, ++ DW_LANG_Modula3 = 0x0017, ++ DW_LANG_Haskell = 0x0018, ++ DW_LANG_C_plus_plus_03 = 0x0019, ++ DW_LANG_C_plus_plus_11 = 0x001a, ++ DW_LANG_OCaml = 0x001b, ++ DW_LANG_Rust = 0x001c, ++ DW_LANG_C11 = 0x001d, ++ DW_LANG_Swift = 0x001e, ++ DW_LANG_Julia = 0x001f, ++ DW_LANG_Dylan = 0x0020, ++ DW_LANG_C_plus_plus_14 = 0x0021, ++ DW_LANG_Fortran03 = 0x0022, ++ DW_LANG_Fortran08 = 0x0023, ++ DW_LANG_RenderScript = 0x0024, ++ ++ DW_LANG_lo_user = 0x8000, /* Implementation-defined range start. */ ++ DW_LANG_hi_user = 0xffff, /* Implementation-defined range start. */ ++ ++ /* MIPS. */ ++ DW_LANG_Mips_Assembler = 0x8001, ++ /* UPC. */ ++ DW_LANG_Upc = 0x8765, ++ /* HP extensions. */ ++ DW_LANG_HP_Bliss = 0x8003, ++ DW_LANG_HP_Basic91 = 0x8004, ++ DW_LANG_HP_Pascal91 = 0x8005, ++ DW_LANG_HP_IMacro = 0x8006, ++ DW_LANG_HP_Assembler = 0x8007, ++ ++ /* Rust extension, but replaced in DWARF 5. */ ++ DW_LANG_Rust_old = 0x9000 ++ }; ++ ++/* Names and codes for macro information. */ ++enum dwarf_macinfo_record_type ++ { ++ DW_MACINFO_define = 1, ++ DW_MACINFO_undef = 2, ++ DW_MACINFO_start_file = 3, ++ DW_MACINFO_end_file = 4, ++ DW_MACINFO_vendor_ext = 255 ++ }; ++ ++/* DW_TAG_defaulted/DW_TAG_GNU_defaulted attributes. 
*/ ++enum dwarf_defaulted_attribute ++ { ++ DW_DEFAULTED_no = 0x00, ++ DW_DEFAULTED_in_class = 0x01, ++ DW_DEFAULTED_out_of_class = 0x02 ++ }; ++ ++/* Names and codes for new style macro information. */ ++enum dwarf_macro_record_type ++ { ++ DW_MACRO_define = 0x01, ++ DW_MACRO_undef = 0x02, ++ DW_MACRO_start_file = 0x03, ++ DW_MACRO_end_file = 0x04, ++ DW_MACRO_define_strp = 0x05, ++ DW_MACRO_undef_strp = 0x06, ++ DW_MACRO_import = 0x07, ++ DW_MACRO_define_sup = 0x08, ++ DW_MACRO_undef_sup = 0x09, ++ DW_MACRO_import_sup = 0x0a, ++ DW_MACRO_define_strx = 0x0b, ++ DW_MACRO_undef_strx = 0x0c, ++ DW_MACRO_lo_user = 0xe0, ++ DW_MACRO_hi_user = 0xff, ++ ++ /* Compatibility macros for the GNU .debug_macro extension. */ ++ DW_MACRO_GNU_define = 0x01, ++ DW_MACRO_GNU_undef = 0x02, ++ DW_MACRO_GNU_start_file = 0x03, ++ DW_MACRO_GNU_end_file = 0x04, ++ DW_MACRO_GNU_define_indirect = 0x05, ++ DW_MACRO_GNU_undef_indirect = 0x06, ++ DW_MACRO_GNU_transparent_include = 0x07, ++ /* Extensions for DWZ multifile. ++ See http://www.dwarfstd.org/ShowIssue.php?issue=120604.1&type=open . */ ++ DW_MACRO_GNU_define_indirect_alt = 0x08, ++ DW_MACRO_GNU_undef_indirect_alt = 0x09, ++ DW_MACRO_GNU_transparent_include_alt = 0x0a, ++ DW_MACRO_GNU_lo_user = 0xe0, ++ DW_MACRO_GNU_hi_user = 0xff ++ }; ++ ++/* Index attributes in the Abbreviations Table. */ ++enum dwarf_name_index_attribute ++ { ++ DW_IDX_compile_unit = 1, ++ DW_IDX_type_unit = 2, ++ DW_IDX_die_offset = 3, ++ DW_IDX_parent = 4, ++ DW_IDX_type_hash = 5, ++ DW_IDX_lo_user = 0x2000, ++ DW_IDX_hi_user = 0x3fff ++ }; ++ ++/* Range list entry kinds in .debug_rnglists* section. */ ++enum dwarf_range_list_entry ++ { ++ DW_RLE_end_of_list = 0x00, ++ DW_RLE_base_addressx = 0x01, ++ DW_RLE_startx_endx = 0x02, ++ DW_RLE_startx_length = 0x03, ++ DW_RLE_offset_pair = 0x04, ++ DW_RLE_base_address = 0x05, ++ DW_RLE_start_end = 0x06, ++ DW_RLE_start_length = 0x07 ++ }; ++ ++/* Unit types in unit_type unit header field. 
*/ ++enum dwarf_unit_type ++ { ++ DW_UT_compile = 0x01, ++ DW_UT_type = 0x02, ++ DW_UT_partial = 0x03, ++ DW_UT_skeleton = 0x04, ++ DW_UT_split_compile = 0x05, ++ DW_UT_split_type = 0x06, ++ DW_UT_lo_user = 0x80, ++ DW_UT_hi_user = 0xff ++ }; ++ ++/* @@@ For use with GNU frame unwind information. */ ++ ++#define DW_EH_PE_absptr 0x00 ++#define DW_EH_PE_omit 0xff ++ ++#define DW_EH_PE_uleb128 0x01 ++#define DW_EH_PE_udata2 0x02 ++#define DW_EH_PE_udata4 0x03 ++#define DW_EH_PE_udata8 0x04 ++#define DW_EH_PE_sleb128 0x09 ++#define DW_EH_PE_sdata2 0x0A ++#define DW_EH_PE_sdata4 0x0B ++#define DW_EH_PE_sdata8 0x0C ++#define DW_EH_PE_signed 0x08 ++ ++#define DW_EH_PE_pcrel 0x10 ++#define DW_EH_PE_textrel 0x20 ++#define DW_EH_PE_datarel 0x30 ++#define DW_EH_PE_funcrel 0x40 ++#define DW_EH_PE_aligned 0x50 ++ ++#define DW_EH_PE_indirect 0x80 ++ ++/* Codes for the debug sections in a dwarf package (.dwp) file. ++ Extensions for Fission. See http://gcc.gnu.org/wiki/DebugFissionDWP. */ ++enum dwarf_sect ++ { ++ DW_SECT_INFO = 1, ++ DW_SECT_TYPES = 2, ++ DW_SECT_ABBREV = 3, ++ DW_SECT_LINE = 4, ++ DW_SECT_LOC = 5, ++ DW_SECT_STR_OFFSETS = 6, ++ DW_SECT_MACINFO = 7, ++ DW_SECT_MACRO = 8, ++ DW_SECT_MAX = 8 ++ }; ++ ++#ifdef __cplusplus ++extern "C" { ++#endif /* __cplusplus */ ++ ++/* Return the name of a DW_TAG_ constant, or NULL if the value is not ++ recognized. */ ++extern const char *get_DW_TAG_name (unsigned int tag); ++ ++/* Return the name of a DW_AT_ constant, or NULL if the value is not ++ recognized. */ ++extern const char *get_DW_AT_name (unsigned int attr); ++ ++/* Return the name of a DW_FORM_ constant, or NULL if the value is not ++ recognized. */ ++extern const char *get_DW_FORM_name (unsigned int form); ++ ++/* Return the name of a DW_OP_ constant, or NULL if the value is not ++ recognized. */ ++extern const char *get_DW_OP_name (unsigned int op); ++ ++/* Return the name of a DW_ATE_ constant, or NULL if the value is not ++ recognized. 
*/ ++extern const char *get_DW_ATE_name (unsigned int enc); ++ ++/* Return the name of a DW_CFA_ constant, or NULL if the value is not ++ recognized. */ ++extern const char *get_DW_CFA_name (unsigned int opc); ++ ++#ifdef __cplusplus ++} ++#endif /* __cplusplus */ ++ ++#endif /* _DWARF2_H */ diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/elf.c index 000000000,000000000..e87741382 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/elf.c @@@ -1,0 -1,0 +1,979 @@@ ++/* elf.c -- Get debug data from an ELF file for backtraces. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++#include ++#include ++ ++#ifdef HAVE_DL_ITERATE_PHDR ++#include ++#endif ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++#ifndef HAVE_DL_ITERATE_PHDR ++ ++/* Dummy version of dl_iterate_phdr for systems that don't have it. */ ++ ++#define dl_phdr_info x_dl_phdr_info ++#define dl_iterate_phdr x_dl_iterate_phdr ++ ++struct dl_phdr_info ++{ ++ uintptr_t dlpi_addr; ++ const char *dlpi_name; ++}; ++ ++static int ++dl_iterate_phdr (int (*callback) (struct dl_phdr_info *, ++ size_t, void *) ATTRIBUTE_UNUSED, ++ void *data ATTRIBUTE_UNUSED) ++{ ++ return 0; ++} ++ ++#endif /* ! defined (HAVE_DL_ITERATE_PHDR) */ ++ ++/* The configure script must tell us whether we are 32-bit or 64-bit ++ ELF. We could make this code test and support either possibility, ++ but there is no point. This code only works for the currently ++ running executable, which means that we know the ELF mode at ++ configure mode. */ ++ ++#if BACKTRACE_ELF_SIZE != 32 && BACKTRACE_ELF_SIZE != 64 ++#error "Unknown BACKTRACE_ELF_SIZE" ++#endif ++ ++/* might #include which might define our constants ++ with slightly different values. Undefine them to be safe. 
*/ ++ ++#undef EI_NIDENT ++#undef EI_MAG0 ++#undef EI_MAG1 ++#undef EI_MAG2 ++#undef EI_MAG3 ++#undef EI_CLASS ++#undef EI_DATA ++#undef EI_VERSION ++#undef ELF_MAG0 ++#undef ELF_MAG1 ++#undef ELF_MAG2 ++#undef ELF_MAG3 ++#undef ELFCLASS32 ++#undef ELFCLASS64 ++#undef ELFDATA2LSB ++#undef ELFDATA2MSB ++#undef EV_CURRENT ++#undef ET_DYN ++#undef SHN_LORESERVE ++#undef SHN_XINDEX ++#undef SHN_UNDEF ++#undef SHT_SYMTAB ++#undef SHT_STRTAB ++#undef SHT_DYNSYM ++#undef STT_OBJECT ++#undef STT_FUNC ++ ++/* Basic types. */ ++ ++typedef uint16_t b_elf_half; /* Elf_Half. */ ++typedef uint32_t b_elf_word; /* Elf_Word. */ ++typedef int32_t b_elf_sword; /* Elf_Sword. */ ++ ++#if BACKTRACE_ELF_SIZE == 32 ++ ++typedef uint32_t b_elf_addr; /* Elf_Addr. */ ++typedef uint32_t b_elf_off; /* Elf_Off. */ ++ ++typedef uint32_t b_elf_wxword; /* 32-bit Elf_Word, 64-bit ELF_Xword. */ ++ ++#else ++ ++typedef uint64_t b_elf_addr; /* Elf_Addr. */ ++typedef uint64_t b_elf_off; /* Elf_Off. */ ++typedef uint64_t b_elf_xword; /* Elf_Xword. */ ++typedef int64_t b_elf_sxword; /* Elf_Sxword. */ ++ ++typedef uint64_t b_elf_wxword; /* 32-bit Elf_Word, 64-bit ELF_Xword. */ ++ ++#endif ++ ++/* Data structures and associated constants. 
*/ ++ ++#define EI_NIDENT 16 ++ ++typedef struct { ++ unsigned char e_ident[EI_NIDENT]; /* ELF "magic number" */ ++ b_elf_half e_type; /* Identifies object file type */ ++ b_elf_half e_machine; /* Specifies required architecture */ ++ b_elf_word e_version; /* Identifies object file version */ ++ b_elf_addr e_entry; /* Entry point virtual address */ ++ b_elf_off e_phoff; /* Program header table file offset */ ++ b_elf_off e_shoff; /* Section header table file offset */ ++ b_elf_word e_flags; /* Processor-specific flags */ ++ b_elf_half e_ehsize; /* ELF header size in bytes */ ++ b_elf_half e_phentsize; /* Program header table entry size */ ++ b_elf_half e_phnum; /* Program header table entry count */ ++ b_elf_half e_shentsize; /* Section header table entry size */ ++ b_elf_half e_shnum; /* Section header table entry count */ ++ b_elf_half e_shstrndx; /* Section header string table index */ ++} b_elf_ehdr; /* Elf_Ehdr. */ ++ ++#define EI_MAG0 0 ++#define EI_MAG1 1 ++#define EI_MAG2 2 ++#define EI_MAG3 3 ++#define EI_CLASS 4 ++#define EI_DATA 5 ++#define EI_VERSION 6 ++ ++#define ELFMAG0 0x7f ++#define ELFMAG1 'E' ++#define ELFMAG2 'L' ++#define ELFMAG3 'F' ++ ++#define ELFCLASS32 1 ++#define ELFCLASS64 2 ++ ++#define ELFDATA2LSB 1 ++#define ELFDATA2MSB 2 ++ ++#define EV_CURRENT 1 ++ ++#define ET_DYN 3 ++ ++typedef struct { ++ b_elf_word sh_name; /* Section name, index in string tbl */ ++ b_elf_word sh_type; /* Type of section */ ++ b_elf_wxword sh_flags; /* Miscellaneous section attributes */ ++ b_elf_addr sh_addr; /* Section virtual addr at execution */ ++ b_elf_off sh_offset; /* Section file offset */ ++ b_elf_wxword sh_size; /* Size of section in bytes */ ++ b_elf_word sh_link; /* Index of another section */ ++ b_elf_word sh_info; /* Additional section information */ ++ b_elf_wxword sh_addralign; /* Section alignment */ ++ b_elf_wxword sh_entsize; /* Entry size if section holds table */ ++} b_elf_shdr; /* Elf_Shdr. 
*/ ++ ++#define SHN_UNDEF 0x0000 /* Undefined section */ ++#define SHN_LORESERVE 0xFF00 /* Begin range of reserved indices */ ++#define SHN_XINDEX 0xFFFF /* Section index is held elsewhere */ ++ ++#define SHT_SYMTAB 2 ++#define SHT_STRTAB 3 ++#define SHT_DYNSYM 11 ++ ++#if BACKTRACE_ELF_SIZE == 32 ++ ++typedef struct ++{ ++ b_elf_word st_name; /* Symbol name, index in string tbl */ ++ b_elf_addr st_value; /* Symbol value */ ++ b_elf_word st_size; /* Symbol size */ ++ unsigned char st_info; /* Symbol binding and type */ ++ unsigned char st_other; /* Visibility and other data */ ++ b_elf_half st_shndx; /* Symbol section index */ ++} b_elf_sym; /* Elf_Sym. */ ++ ++#else /* BACKTRACE_ELF_SIZE != 32 */ ++ ++typedef struct ++{ ++ b_elf_word st_name; /* Symbol name, index in string tbl */ ++ unsigned char st_info; /* Symbol binding and type */ ++ unsigned char st_other; /* Visibility and other data */ ++ b_elf_half st_shndx; /* Symbol section index */ ++ b_elf_addr st_value; /* Symbol value */ ++ b_elf_xword st_size; /* Symbol size */ ++} b_elf_sym; /* Elf_Sym. */ ++ ++#endif /* BACKTRACE_ELF_SIZE != 32 */ ++ ++#define STT_OBJECT 1 ++#define STT_FUNC 2 ++ ++/* An index of ELF sections we care about. */ ++ ++enum debug_section ++{ ++ DEBUG_INFO, ++ DEBUG_LINE, ++ DEBUG_ABBREV, ++ DEBUG_RANGES, ++ DEBUG_STR, ++ DEBUG_MAX ++}; ++ ++/* Names of sections, indexed by enum elf_section. */ ++ ++static const char * const debug_section_names[DEBUG_MAX] = ++{ ++ ".debug_info", ++ ".debug_line", ++ ".debug_abbrev", ++ ".debug_ranges", ++ ".debug_str" ++}; ++ ++/* Information we gather for the sections we care about. */ ++ ++struct debug_section_info ++{ ++ /* Section file offset. */ ++ off_t offset; ++ /* Section size. */ ++ size_t size; ++ /* Section contents, after read from file. */ ++ const unsigned char *data; ++}; ++ ++/* Information we keep for an ELF symbol. */ ++ ++struct elf_symbol ++{ ++ /* The name of the symbol. */ ++ const char *name; ++ /* The address of the symbol. 
*/ ++ uintptr_t address; ++ /* The size of the symbol. */ ++ size_t size; ++}; ++ ++/* Information to pass to elf_syminfo. */ ++ ++struct elf_syminfo_data ++{ ++ /* Symbols for the next module. */ ++ struct elf_syminfo_data *next; ++ /* The ELF symbols, sorted by address. */ ++ struct elf_symbol *symbols; ++ /* The number of symbols. */ ++ size_t count; ++}; ++ ++/* A dummy callback function used when we can't find any debug info. */ ++ ++static int ++elf_nodebug (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ uintptr_t pc ATTRIBUTE_UNUSED, ++ backtrace_full_callback callback ATTRIBUTE_UNUSED, ++ backtrace_error_callback error_callback, void *data) ++{ ++ error_callback (data, "no debug info in ELF executable", -1); ++ return 0; ++} ++ ++/* A dummy callback function used when we can't find a symbol ++ table. */ ++ ++static void ++elf_nosyms (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ uintptr_t addr ATTRIBUTE_UNUSED, ++ backtrace_syminfo_callback callback ATTRIBUTE_UNUSED, ++ backtrace_error_callback error_callback, void *data) ++{ ++ error_callback (data, "no symbol table in ELF executable", -1); ++} ++ ++/* Compare struct elf_symbol for qsort. */ ++ ++static int ++elf_symbol_compare (const void *v1, const void *v2) ++{ ++ const struct elf_symbol *e1 = (const struct elf_symbol *) v1; ++ const struct elf_symbol *e2 = (const struct elf_symbol *) v2; ++ ++ if (e1->address < e2->address) ++ return -1; ++ else if (e1->address > e2->address) ++ return 1; ++ else ++ return 0; ++} ++ ++/* Compare an ADDR against an elf_symbol for bsearch. We allocate one ++ extra entry in the array so that this can look safely at the next ++ entry. 
*/ ++ ++static int ++elf_symbol_search (const void *vkey, const void *ventry) ++{ ++ const uintptr_t *key = (const uintptr_t *) vkey; ++ const struct elf_symbol *entry = (const struct elf_symbol *) ventry; ++ uintptr_t addr; ++ ++ addr = *key; ++ if (addr < entry->address) ++ return -1; ++ else if (addr >= entry->address + entry->size) ++ return 1; ++ else ++ return 0; ++} ++ ++/* Initialize the symbol table info for elf_syminfo. */ ++ ++static int ++elf_initialize_syminfo (struct backtrace_state *state, ++ uintptr_t base_address, ++ const unsigned char *symtab_data, size_t symtab_size, ++ const unsigned char *strtab, size_t strtab_size, ++ backtrace_error_callback error_callback, ++ void *data, struct elf_syminfo_data *sdata) ++{ ++ size_t sym_count; ++ const b_elf_sym *sym; ++ size_t elf_symbol_count; ++ size_t elf_symbol_size; ++ struct elf_symbol *elf_symbols; ++ size_t i; ++ unsigned int j; ++ ++ sym_count = symtab_size / sizeof (b_elf_sym); ++ ++ /* We only care about function symbols. Count them. 
*/ ++ sym = (const b_elf_sym *) symtab_data; ++ elf_symbol_count = 0; ++ for (i = 0; i < sym_count; ++i, ++sym) ++ { ++ int info; ++ ++ info = sym->st_info & 0xf; ++ if ((info == STT_FUNC || info == STT_OBJECT) ++ && sym->st_shndx != SHN_UNDEF) ++ ++elf_symbol_count; ++ } ++ ++ elf_symbol_size = elf_symbol_count * sizeof (struct elf_symbol); ++ elf_symbols = ((struct elf_symbol *) ++ backtrace_alloc (state, elf_symbol_size, error_callback, ++ data)); ++ if (elf_symbols == NULL) ++ return 0; ++ ++ sym = (const b_elf_sym *) symtab_data; ++ j = 0; ++ for (i = 0; i < sym_count; ++i, ++sym) ++ { ++ int info; ++ ++ info = sym->st_info & 0xf; ++ if (info != STT_FUNC && info != STT_OBJECT) ++ continue; ++ if (sym->st_shndx == SHN_UNDEF) ++ continue; ++ if (sym->st_name >= strtab_size) ++ { ++ error_callback (data, "symbol string index out of range", 0); ++ backtrace_free (state, elf_symbols, elf_symbol_size, error_callback, ++ data); ++ return 0; ++ } ++ elf_symbols[j].name = (const char *) strtab + sym->st_name; ++ elf_symbols[j].address = sym->st_value + base_address; ++ elf_symbols[j].size = sym->st_size; ++ ++j; ++ } ++ ++ backtrace_qsort (elf_symbols, elf_symbol_count, sizeof (struct elf_symbol), ++ elf_symbol_compare); ++ ++ sdata->next = NULL; ++ sdata->symbols = elf_symbols; ++ sdata->count = elf_symbol_count; ++ ++ return 1; ++} ++ ++/* Add EDATA to the list in STATE. 
*/ ++ ++static void ++elf_add_syminfo_data (struct backtrace_state *state, ++ struct elf_syminfo_data *edata) ++{ ++ if (!state->threaded) ++ { ++ struct elf_syminfo_data **pp; ++ ++ for (pp = (struct elf_syminfo_data **) (void *) &state->syminfo_data; ++ *pp != NULL; ++ pp = &(*pp)->next) ++ ; ++ *pp = edata; ++ } ++ else ++ { ++ while (1) ++ { ++ struct elf_syminfo_data **pp; ++ ++ pp = (struct elf_syminfo_data **) (void *) &state->syminfo_data; ++ ++ while (1) ++ { ++ struct elf_syminfo_data *p; ++ ++ p = backtrace_atomic_load_pointer (pp); ++ ++ if (p == NULL) ++ break; ++ ++ pp = &p->next; ++ } ++ ++ if (__sync_bool_compare_and_swap (pp, NULL, edata)) ++ break; ++ } ++ } ++} ++ ++/* Return the symbol name and value for an ADDR. */ ++ ++static void ++elf_syminfo (struct backtrace_state *state, uintptr_t addr, ++ backtrace_syminfo_callback callback, ++ backtrace_error_callback error_callback ATTRIBUTE_UNUSED, ++ void *data) ++{ ++ struct elf_syminfo_data *edata; ++ struct elf_symbol *sym = NULL; ++ ++ if (!state->threaded) ++ { ++ for (edata = (struct elf_syminfo_data *) state->syminfo_data; ++ edata != NULL; ++ edata = edata->next) ++ { ++ sym = ((struct elf_symbol *) ++ bsearch (&addr, edata->symbols, edata->count, ++ sizeof (struct elf_symbol), elf_symbol_search)); ++ if (sym != NULL) ++ break; ++ } ++ } ++ else ++ { ++ struct elf_syminfo_data **pp; ++ ++ pp = (struct elf_syminfo_data **) (void *) &state->syminfo_data; ++ while (1) ++ { ++ edata = backtrace_atomic_load_pointer (pp); ++ if (edata == NULL) ++ break; ++ ++ sym = ((struct elf_symbol *) ++ bsearch (&addr, edata->symbols, edata->count, ++ sizeof (struct elf_symbol), elf_symbol_search)); ++ if (sym != NULL) ++ break; ++ ++ pp = &edata->next; ++ } ++ } ++ ++ if (sym == NULL) ++ callback (data, addr, NULL, 0, 0); ++ else ++ callback (data, addr, sym->name, sym->address, sym->size); ++} ++ ++/* Add the backtrace data for one ELF file. 
Returns 1 on success, ++ 0 on failure (in both cases descriptor is closed) or -1 if exe ++ is non-zero and the ELF file is ET_DYN, which tells the caller that ++ elf_add will need to be called on the descriptor again after ++ base_address is determined. */ ++ ++static int ++elf_add (struct backtrace_state *state, int descriptor, uintptr_t base_address, ++ backtrace_error_callback error_callback, void *data, ++ fileline *fileline_fn, int *found_sym, int *found_dwarf, int exe) ++{ ++ struct backtrace_view ehdr_view; ++ b_elf_ehdr ehdr; ++ off_t shoff; ++ unsigned int shnum; ++ unsigned int shstrndx; ++ struct backtrace_view shdrs_view; ++ int shdrs_view_valid; ++ const b_elf_shdr *shdrs; ++ const b_elf_shdr *shstrhdr; ++ size_t shstr_size; ++ off_t shstr_off; ++ struct backtrace_view names_view; ++ int names_view_valid; ++ const char *names; ++ unsigned int symtab_shndx; ++ unsigned int dynsym_shndx; ++ unsigned int i; ++ struct debug_section_info sections[DEBUG_MAX]; ++ struct backtrace_view symtab_view; ++ int symtab_view_valid; ++ struct backtrace_view strtab_view; ++ int strtab_view_valid; ++ off_t min_offset; ++ off_t max_offset; ++ struct backtrace_view debug_view; ++ int debug_view_valid; ++ ++ *found_sym = 0; ++ *found_dwarf = 0; ++ ++ shdrs_view_valid = 0; ++ names_view_valid = 0; ++ symtab_view_valid = 0; ++ strtab_view_valid = 0; ++ debug_view_valid = 0; ++ ++ if (!backtrace_get_view (state, descriptor, 0, sizeof ehdr, error_callback, ++ data, &ehdr_view)) ++ goto fail; ++ ++ memcpy (&ehdr, ehdr_view.data, sizeof ehdr); ++ ++ backtrace_release_view (state, &ehdr_view, error_callback, data); ++ ++ if (ehdr.e_ident[EI_MAG0] != ELFMAG0 ++ || ehdr.e_ident[EI_MAG1] != ELFMAG1 ++ || ehdr.e_ident[EI_MAG2] != ELFMAG2 ++ || ehdr.e_ident[EI_MAG3] != ELFMAG3) ++ { ++ error_callback (data, "executable file is not ELF", 0); ++ goto fail; ++ } ++ if (ehdr.e_ident[EI_VERSION] != EV_CURRENT) ++ { ++ error_callback (data, "executable file is unrecognized ELF version", 0); 
++ goto fail; ++ } ++ ++#if BACKTRACE_ELF_SIZE == 32 ++#define BACKTRACE_ELFCLASS ELFCLASS32 ++#else ++#define BACKTRACE_ELFCLASS ELFCLASS64 ++#endif ++ ++ if (ehdr.e_ident[EI_CLASS] != BACKTRACE_ELFCLASS) ++ { ++ error_callback (data, "executable file is unexpected ELF class", 0); ++ goto fail; ++ } ++ ++ if (ehdr.e_ident[EI_DATA] != ELFDATA2LSB ++ && ehdr.e_ident[EI_DATA] != ELFDATA2MSB) ++ { ++ error_callback (data, "executable file has unknown endianness", 0); ++ goto fail; ++ } ++ ++ /* If the executable is ET_DYN, it is either a PIE, or we are running ++ directly a shared library with .interp. We need to wait for ++ dl_iterate_phdr in that case to determine the actual base_address. */ ++ if (exe && ehdr.e_type == ET_DYN) ++ return -1; ++ ++ shoff = ehdr.e_shoff; ++ shnum = ehdr.e_shnum; ++ shstrndx = ehdr.e_shstrndx; ++ ++ if ((shnum == 0 || shstrndx == SHN_XINDEX) ++ && shoff != 0) ++ { ++ struct backtrace_view shdr_view; ++ const b_elf_shdr *shdr; ++ ++ if (!backtrace_get_view (state, descriptor, shoff, sizeof shdr, ++ error_callback, data, &shdr_view)) ++ goto fail; ++ ++ shdr = (const b_elf_shdr *) shdr_view.data; ++ ++ if (shnum == 0) ++ shnum = shdr->sh_size; ++ ++ if (shstrndx == SHN_XINDEX) ++ { ++ shstrndx = shdr->sh_link; ++ ++ /* Versions of the GNU binutils between 2.12 and 2.18 did ++ not handle objects with more than SHN_LORESERVE sections ++ correctly. All large section indexes were offset by ++ 0x100. There is more information at ++ http://sourceware.org/bugzilla/show_bug.cgi?id-5900 . ++ Fortunately these object files are easy to detect, as the ++ GNU binutils always put the section header string table ++ near the end of the list of sections. Thus if the ++ section header string table index is larger than the ++ number of sections, then we know we have to subtract ++ 0x100 to get the real section index. 
*/ ++ if (shstrndx >= shnum && shstrndx >= SHN_LORESERVE + 0x100) ++ shstrndx -= 0x100; ++ } ++ ++ backtrace_release_view (state, &shdr_view, error_callback, data); ++ } ++ ++ /* To translate PC to file/line when using DWARF, we need to find ++ the .debug_info and .debug_line sections. */ ++ ++ /* Read the section headers, skipping the first one. */ ++ ++ if (!backtrace_get_view (state, descriptor, shoff + sizeof (b_elf_shdr), ++ (shnum - 1) * sizeof (b_elf_shdr), ++ error_callback, data, &shdrs_view)) ++ goto fail; ++ shdrs_view_valid = 1; ++ shdrs = (const b_elf_shdr *) shdrs_view.data; ++ ++ /* Read the section names. */ ++ ++ shstrhdr = &shdrs[shstrndx - 1]; ++ shstr_size = shstrhdr->sh_size; ++ shstr_off = shstrhdr->sh_offset; ++ ++ if (!backtrace_get_view (state, descriptor, shstr_off, shstr_size, ++ error_callback, data, &names_view)) ++ goto fail; ++ names_view_valid = 1; ++ names = (const char *) names_view.data; ++ ++ symtab_shndx = 0; ++ dynsym_shndx = 0; ++ ++ memset (sections, 0, sizeof sections); ++ ++ /* Look for the symbol table. 
*/ ++ for (i = 1; i < shnum; ++i) ++ { ++ const b_elf_shdr *shdr; ++ unsigned int sh_name; ++ const char *name; ++ int j; ++ ++ shdr = &shdrs[i - 1]; ++ ++ if (shdr->sh_type == SHT_SYMTAB) ++ symtab_shndx = i; ++ else if (shdr->sh_type == SHT_DYNSYM) ++ dynsym_shndx = i; ++ ++ sh_name = shdr->sh_name; ++ if (sh_name >= shstr_size) ++ { ++ error_callback (data, "ELF section name out of range", 0); ++ goto fail; ++ } ++ ++ name = names + sh_name; ++ ++ for (j = 0; j < (int) DEBUG_MAX; ++j) ++ { ++ if (strcmp (name, debug_section_names[j]) == 0) ++ { ++ sections[j].offset = shdr->sh_offset; ++ sections[j].size = shdr->sh_size; ++ break; ++ } ++ } ++ } ++ ++ if (symtab_shndx == 0) ++ symtab_shndx = dynsym_shndx; ++ if (symtab_shndx != 0) ++ { ++ const b_elf_shdr *symtab_shdr; ++ unsigned int strtab_shndx; ++ const b_elf_shdr *strtab_shdr; ++ struct elf_syminfo_data *sdata; ++ ++ symtab_shdr = &shdrs[symtab_shndx - 1]; ++ strtab_shndx = symtab_shdr->sh_link; ++ if (strtab_shndx >= shnum) ++ { ++ error_callback (data, ++ "ELF symbol table strtab link out of range", 0); ++ goto fail; ++ } ++ strtab_shdr = &shdrs[strtab_shndx - 1]; ++ ++ if (!backtrace_get_view (state, descriptor, symtab_shdr->sh_offset, ++ symtab_shdr->sh_size, error_callback, data, ++ &symtab_view)) ++ goto fail; ++ symtab_view_valid = 1; ++ ++ if (!backtrace_get_view (state, descriptor, strtab_shdr->sh_offset, ++ strtab_shdr->sh_size, error_callback, data, ++ &strtab_view)) ++ goto fail; ++ strtab_view_valid = 1; ++ ++ sdata = ((struct elf_syminfo_data *) ++ backtrace_alloc (state, sizeof *sdata, error_callback, data)); ++ if (sdata == NULL) ++ goto fail; ++ ++ if (!elf_initialize_syminfo (state, base_address, ++ symtab_view.data, symtab_shdr->sh_size, ++ strtab_view.data, strtab_shdr->sh_size, ++ error_callback, data, sdata)) ++ { ++ backtrace_free (state, sdata, sizeof *sdata, error_callback, data); ++ goto fail; ++ } ++ ++ /* We no longer need the symbol table, but we hold on to the ++ string table 
permanently. */ ++ backtrace_release_view (state, &symtab_view, error_callback, data); ++ ++ *found_sym = 1; ++ ++ elf_add_syminfo_data (state, sdata); ++ } ++ ++ /* FIXME: Need to handle compressed debug sections. */ ++ ++ backtrace_release_view (state, &shdrs_view, error_callback, data); ++ shdrs_view_valid = 0; ++ backtrace_release_view (state, &names_view, error_callback, data); ++ names_view_valid = 0; ++ ++ /* Read all the debug sections in a single view, since they are ++ probably adjacent in the file. We never release this view. */ ++ ++ min_offset = 0; ++ max_offset = 0; ++ for (i = 0; i < (int) DEBUG_MAX; ++i) ++ { ++ off_t end; ++ ++ if (sections[i].size == 0) ++ continue; ++ if (min_offset == 0 || sections[i].offset < min_offset) ++ min_offset = sections[i].offset; ++ end = sections[i].offset + sections[i].size; ++ if (end > max_offset) ++ max_offset = end; ++ } ++ if (min_offset == 0 || max_offset == 0) ++ { ++ if (!backtrace_close (descriptor, error_callback, data)) ++ goto fail; ++ return 1; ++ } ++ ++ if (!backtrace_get_view (state, descriptor, min_offset, ++ max_offset - min_offset, ++ error_callback, data, &debug_view)) ++ goto fail; ++ debug_view_valid = 1; ++ ++ /* We've read all we need from the executable. 
*/ ++ if (!backtrace_close (descriptor, error_callback, data)) ++ goto fail; ++ descriptor = -1; ++ ++ for (i = 0; i < (int) DEBUG_MAX; ++i) ++ { ++ if (sections[i].size == 0) ++ sections[i].data = NULL; ++ else ++ sections[i].data = ((const unsigned char *) debug_view.data ++ + (sections[i].offset - min_offset)); ++ } ++ ++ if (!backtrace_dwarf_add (state, base_address, ++ sections[DEBUG_INFO].data, ++ sections[DEBUG_INFO].size, ++ sections[DEBUG_LINE].data, ++ sections[DEBUG_LINE].size, ++ sections[DEBUG_ABBREV].data, ++ sections[DEBUG_ABBREV].size, ++ sections[DEBUG_RANGES].data, ++ sections[DEBUG_RANGES].size, ++ sections[DEBUG_STR].data, ++ sections[DEBUG_STR].size, ++ ehdr.e_ident[EI_DATA] == ELFDATA2MSB, ++ error_callback, data, fileline_fn)) ++ goto fail; ++ ++ *found_dwarf = 1; ++ ++ return 1; ++ ++ fail: ++ if (shdrs_view_valid) ++ backtrace_release_view (state, &shdrs_view, error_callback, data); ++ if (names_view_valid) ++ backtrace_release_view (state, &names_view, error_callback, data); ++ if (symtab_view_valid) ++ backtrace_release_view (state, &symtab_view, error_callback, data); ++ if (strtab_view_valid) ++ backtrace_release_view (state, &strtab_view, error_callback, data); ++ if (debug_view_valid) ++ backtrace_release_view (state, &debug_view, error_callback, data); ++ if (descriptor != -1) ++ backtrace_close (descriptor, error_callback, data); ++ return 0; ++} ++ ++/* Data passed to phdr_callback. */ ++ ++struct phdr_data ++{ ++ struct backtrace_state *state; ++ backtrace_error_callback error_callback; ++ void *data; ++ fileline *fileline_fn; ++ int *found_sym; ++ int *found_dwarf; ++ int exe_descriptor; ++}; ++ ++/* Callback passed to dl_iterate_phdr. Load debug info from shared ++ libraries. 
*/ ++ ++static int ++#ifdef __i386__ ++__attribute__ ((__force_align_arg_pointer__)) ++#endif ++phdr_callback (struct dl_phdr_info *info, size_t size ATTRIBUTE_UNUSED, ++ void *pdata) ++{ ++ struct phdr_data *pd = (struct phdr_data *) pdata; ++ int descriptor; ++ int does_not_exist; ++ fileline elf_fileline_fn; ++ int found_dwarf; ++ ++ /* There is not much we can do if we don't have the module name, ++ unless executable is ET_DYN, where we expect the very first ++ phdr_callback to be for the PIE. */ ++ if (info->dlpi_name == NULL || info->dlpi_name[0] == '\0') ++ { ++ if (pd->exe_descriptor == -1) ++ return 0; ++ descriptor = pd->exe_descriptor; ++ pd->exe_descriptor = -1; ++ } ++ else ++ { ++ if (pd->exe_descriptor != -1) ++ { ++ backtrace_close (pd->exe_descriptor, pd->error_callback, pd->data); ++ pd->exe_descriptor = -1; ++ } ++ ++ descriptor = backtrace_open (info->dlpi_name, pd->error_callback, ++ pd->data, &does_not_exist); ++ if (descriptor < 0) ++ return 0; ++ } ++ ++ if (elf_add (pd->state, descriptor, info->dlpi_addr, pd->error_callback, ++ pd->data, &elf_fileline_fn, pd->found_sym, &found_dwarf, 0)) ++ { ++ if (found_dwarf) ++ { ++ *pd->found_dwarf = 1; ++ *pd->fileline_fn = elf_fileline_fn; ++ } ++ } ++ ++ return 0; ++} ++ ++/* Initialize the backtrace data we need from an ELF executable. At ++ the ELF level, all we need to do is find the debug info ++ sections. 
*/ ++ ++int ++backtrace_initialize (struct backtrace_state *state, int descriptor, ++ backtrace_error_callback error_callback, ++ void *data, fileline *fileline_fn) ++{ ++ int ret; ++ int found_sym; ++ int found_dwarf; ++ fileline elf_fileline_fn = elf_nodebug; ++ struct phdr_data pd; ++ ++ ret = elf_add (state, descriptor, 0, error_callback, data, &elf_fileline_fn, ++ &found_sym, &found_dwarf, 1); ++ if (!ret) ++ return 0; ++ ++ pd.state = state; ++ pd.error_callback = error_callback; ++ pd.data = data; ++ pd.fileline_fn = &elf_fileline_fn; ++ pd.found_sym = &found_sym; ++ pd.found_dwarf = &found_dwarf; ++ pd.exe_descriptor = ret < 0 ? descriptor : -1; ++ ++ dl_iterate_phdr (phdr_callback, (void *) &pd); ++ ++ if (!state->threaded) ++ { ++ if (found_sym) ++ state->syminfo_fn = elf_syminfo; ++ else if (state->syminfo_fn == NULL) ++ state->syminfo_fn = elf_nosyms; ++ } ++ else ++ { ++ if (found_sym) ++ backtrace_atomic_store_pointer (&state->syminfo_fn, elf_syminfo); ++ else ++ (void) __sync_bool_compare_and_swap (&state->syminfo_fn, NULL, ++ elf_nosyms); ++ } ++ ++ if (!state->threaded) ++ { ++ if (state->fileline_fn == NULL || state->fileline_fn == elf_nodebug) ++ *fileline_fn = elf_fileline_fn; ++ } ++ else ++ { ++ fileline current_fn; ++ ++ current_fn = backtrace_atomic_load_pointer (&state->fileline_fn); ++ if (current_fn == NULL || current_fn == elf_nodebug) ++ *fileline_fn = elf_fileline_fn; ++ } ++ ++ return 1; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/fileline.c index 000000000,000000000..503bbc6bc new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/fileline.c @@@ -1,0 -1,0 +1,194 @@@ ++/* fileline.c -- Get file and line number information in a backtrace. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. 
++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++#include ++#include ++#include ++#include ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++#ifndef HAVE_GETEXECNAME ++#define getexecname() NULL ++#endif ++ ++/* Initialize the fileline information from the executable. Returns 1 ++ on success, 0 on failure. 
*/ ++ ++static int ++fileline_initialize (struct backtrace_state *state, ++ backtrace_error_callback error_callback, void *data) ++{ ++ int failed; ++ fileline fileline_fn; ++ int pass; ++ int called_error_callback; ++ int descriptor; ++ ++ if (!state->threaded) ++ failed = state->fileline_initialization_failed; ++ else ++ failed = backtrace_atomic_load_int (&state->fileline_initialization_failed); ++ ++ if (failed) ++ { ++ error_callback (data, "failed to read executable information", -1); ++ return 0; ++ } ++ ++ if (!state->threaded) ++ fileline_fn = state->fileline_fn; ++ else ++ fileline_fn = backtrace_atomic_load_pointer (&state->fileline_fn); ++ if (fileline_fn != NULL) ++ return 1; ++ ++ /* We have not initialized the information. Do it now. */ ++ ++ descriptor = -1; ++ called_error_callback = 0; ++ for (pass = 0; pass < 4; ++pass) ++ { ++ const char *filename; ++ int does_not_exist; ++ ++ switch (pass) ++ { ++ case 0: ++ filename = state->filename; ++ break; ++ case 1: ++ filename = getexecname (); ++ break; ++ case 2: ++ filename = "/proc/self/exe"; ++ break; ++ case 3: ++ filename = "/proc/curproc/file"; ++ break; ++ default: ++ abort (); ++ } ++ ++ if (filename == NULL) ++ continue; ++ ++ descriptor = backtrace_open (filename, error_callback, data, ++ &does_not_exist); ++ if (descriptor < 0 && !does_not_exist) ++ { ++ called_error_callback = 1; ++ break; ++ } ++ if (descriptor >= 0) ++ break; ++ } ++ ++ if (descriptor < 0) ++ { ++ if (!called_error_callback) ++ { ++ if (state->filename != NULL) ++ error_callback (data, state->filename, ENOENT); ++ else ++ error_callback (data, ++ "libbacktrace could not find executable to open", ++ 0); ++ } ++ failed = 1; ++ } ++ ++ if (!failed) ++ { ++ if (!backtrace_initialize (state, descriptor, error_callback, data, ++ &fileline_fn)) ++ failed = 1; ++ } ++ ++ if (failed) ++ { ++ if (!state->threaded) ++ state->fileline_initialization_failed = 1; ++ else ++ backtrace_atomic_store_int 
(&state->fileline_initialization_failed, 1); ++ return 0; ++ } ++ ++ if (!state->threaded) ++ state->fileline_fn = fileline_fn; ++ else ++ { ++ backtrace_atomic_store_pointer (&state->fileline_fn, fileline_fn); ++ ++ /* Note that if two threads initialize at once, one of the data ++ sets may be leaked. */ ++ } ++ ++ return 1; ++} ++ ++/* Given a PC, find the file name, line number, and function name. */ ++ ++int ++backtrace_pcinfo (struct backtrace_state *state, uintptr_t pc, ++ backtrace_full_callback callback, ++ backtrace_error_callback error_callback, void *data) ++{ ++ if (!fileline_initialize (state, error_callback, data)) ++ return 0; ++ ++ if (state->fileline_initialization_failed) ++ return 0; ++ ++ return state->fileline_fn (state, pc, callback, error_callback, data); ++} ++ ++/* Given a PC, find the symbol for it, and its value. */ ++ ++int ++backtrace_syminfo (struct backtrace_state *state, uintptr_t pc, ++ backtrace_syminfo_callback callback, ++ backtrace_error_callback error_callback, void *data) ++{ ++ if (!fileline_initialize (state, error_callback, data)) ++ return 0; ++ ++ if (state->fileline_initialization_failed) ++ return 0; ++ ++ state->syminfo_fn (state, pc, callback, error_callback, data); ++ return 1; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/filenames.h index 000000000,000000000..1161daaa4 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/filenames.h @@@ -1,0 -1,0 +1,99 @@@ ++/* Macros for taking apart, interpreting and processing file names. ++ ++ These are here because some non-Posix (a.k.a. DOSish) systems have ++ drive letter brain-damage at the beginning of an absolute file name, ++ use forward- and back-slash in path names interchangeably, and ++ some of them have case-insensitive file names. ++ ++ Copyright (C) 2000-2015 Free Software Foundation, Inc. ++ ++This file is part of BFD, the Binary File Descriptor library. 
++ ++This program is free software; you can redistribute it and/or modify ++it under the terms of the GNU General Public License as published by ++the Free Software Foundation; either version 2 of the License, or ++(at your option) any later version. ++ ++This program is distributed in the hope that it will be useful, ++but WITHOUT ANY WARRANTY; without even the implied warranty of ++MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++GNU General Public License for more details. ++ ++You should have received a copy of the GNU General Public License ++along with this program; if not, write to the Free Software ++Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA 02110-1301, USA. */ ++ ++#ifndef FILENAMES_H ++#define FILENAMES_H ++ ++#include "hashtab.h" /* for hashval_t */ ++ ++#ifdef __cplusplus ++extern "C" { ++#endif ++ ++#if defined(__MSDOS__) || defined(_WIN32) || defined(__OS2__) || defined (__CYGWIN__) ++# ifndef HAVE_DOS_BASED_FILE_SYSTEM ++# define HAVE_DOS_BASED_FILE_SYSTEM 1 ++# endif ++# ifndef HAVE_CASE_INSENSITIVE_FILE_SYSTEM ++# define HAVE_CASE_INSENSITIVE_FILE_SYSTEM 1 ++# endif ++# define HAS_DRIVE_SPEC(f) HAS_DOS_DRIVE_SPEC (f) ++# define IS_DIR_SEPARATOR(c) IS_DOS_DIR_SEPARATOR (c) ++# define IS_ABSOLUTE_PATH(f) IS_DOS_ABSOLUTE_PATH (f) ++#else /* not DOSish */ ++# if defined(__APPLE__) ++# ifndef HAVE_CASE_INSENSITIVE_FILE_SYSTEM ++# define HAVE_CASE_INSENSITIVE_FILE_SYSTEM 1 ++# endif ++# endif /* __APPLE__ */ ++# define HAS_DRIVE_SPEC(f) (0) ++# define IS_DIR_SEPARATOR(c) IS_UNIX_DIR_SEPARATOR (c) ++# define IS_ABSOLUTE_PATH(f) IS_UNIX_ABSOLUTE_PATH (f) ++#endif ++ ++#define IS_DIR_SEPARATOR_1(dos_based, c) \ ++ (((c) == '/') \ ++ || (((c) == '\\') && (dos_based))) ++ ++#define HAS_DRIVE_SPEC_1(dos_based, f) \ ++ ((f)[0] && ((f)[1] == ':') && (dos_based)) ++ ++/* Remove the drive spec from F, assuming HAS_DRIVE_SPEC (f). ++ The result is a pointer to the remainder of F. 
*/ ++#define STRIP_DRIVE_SPEC(f) ((f) + 2) ++ ++#define IS_DOS_DIR_SEPARATOR(c) IS_DIR_SEPARATOR_1 (1, c) ++#define IS_DOS_ABSOLUTE_PATH(f) IS_ABSOLUTE_PATH_1 (1, f) ++#define HAS_DOS_DRIVE_SPEC(f) HAS_DRIVE_SPEC_1 (1, f) ++ ++#define IS_UNIX_DIR_SEPARATOR(c) IS_DIR_SEPARATOR_1 (0, c) ++#define IS_UNIX_ABSOLUTE_PATH(f) IS_ABSOLUTE_PATH_1 (0, f) ++ ++/* Note that when DOS_BASED is true, IS_ABSOLUTE_PATH accepts d:foo as ++ well, although it is only semi-absolute. This is because the users ++ of IS_ABSOLUTE_PATH want to know whether to prepend the current ++ working directory to a file name, which should not be done with a ++ name like d:foo. */ ++#define IS_ABSOLUTE_PATH_1(dos_based, f) \ ++ (IS_DIR_SEPARATOR_1 (dos_based, (f)[0]) \ ++ || HAS_DRIVE_SPEC_1 (dos_based, f)) ++ ++extern int filename_cmp (const char *s1, const char *s2); ++#define FILENAME_CMP(s1, s2) filename_cmp(s1, s2) ++ ++extern int filename_ncmp (const char *s1, const char *s2, ++ size_t n); ++ ++extern hashval_t filename_hash (const void *s); ++ ++extern int filename_eq (const void *s1, const void *s2); ++ ++extern int canonical_filename_eq (const char *a, const char *b); ++ ++#ifdef __cplusplus ++} ++#endif ++ ++#endif /* FILENAMES_H */ diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/filetype.awk index 000000000,000000000..57bab797a new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/filetype.awk @@@ -1,0 -1,0 +1,5 @@@ ++# An awk script to determine the type of a file. ++/\177ELF\001/ { if (NR == 1) { print "elf32"; exit } } ++/\177ELF\002/ { if (NR == 1) { print "elf64"; exit } } ++/\114\001/ { if (NR == 1) { print "pecoff"; exit } } ++/\144\206/ { if (NR == 1) { print "pecoff"; exit } } diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/hashtab.h index 000000000,000000000..b1b5877aa new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/hashtab.h @@@ -1,0 -1,0 +1,204 @@@ ++/* An expandable hash tables datatype. 
++ Copyright (C) 1999-2015 Free Software Foundation, Inc. ++ Contributed by Vladimir Makarov (vmakarov@cygnus.com). ++ ++This program is free software; you can redistribute it and/or modify ++it under the terms of the GNU General Public License as published by ++the Free Software Foundation; either version 2 of the License, or ++(at your option) any later version. ++ ++This program is distributed in the hope that it will be useful, ++but WITHOUT ANY WARRANTY; without even the implied warranty of ++MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++GNU General Public License for more details. ++ ++You should have received a copy of the GNU General Public License ++along with this program; if not, write to the Free Software ++Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA 02110-1301, USA. */ ++ ++/* This package implements basic hash table functionality. It is possible ++ to search for an entry, create an entry and destroy an entry. ++ ++ Elements in the table are generic pointers. ++ ++ The size of the table is not fixed; if the occupancy of the table ++ grows too high the hash table will be expanded. ++ ++ The abstract data implementation is based on generalized Algorithm D ++ from Knuth's book "The art of computer programming". Hash table is ++ expanded by creation of new hash table and transferring elements from ++ the old table to the new table. */ ++ ++#ifndef __HASHTAB_H__ ++#define __HASHTAB_H__ ++ ++#ifdef __cplusplus ++extern "C" { ++#endif /* __cplusplus */ ++ ++#include "ansidecl.h" ++ ++/* The type for a hash code. */ ++typedef unsigned int hashval_t; ++ ++/* Callback function pointer types. */ ++ ++/* Calculate hash of a table entry. */ ++typedef hashval_t (*htab_hash) (const void *); ++ ++/* Compare a table entry with a possible entry. 
The entry already in ++ the table always comes first, so the second element can be of a ++ different type (but in this case htab_find and htab_find_slot ++ cannot be used; instead the variants that accept a hash value ++ must be used). */ ++typedef int (*htab_eq) (const void *, const void *); ++ ++/* Cleanup function called whenever a live element is removed from ++ the hash table. */ ++typedef void (*htab_del) (void *); ++ ++/* Function called by htab_traverse for each live element. The first ++ arg is the slot of the element (which can be passed to htab_clear_slot ++ if desired), the second arg is the auxiliary pointer handed to ++ htab_traverse. Return 1 to continue scan, 0 to stop. */ ++typedef int (*htab_trav) (void **, void *); ++ ++/* Memory-allocation function, with the same functionality as calloc(). ++ Iff it returns NULL, the hash table implementation will pass an error ++ code back to the user, so if your code doesn't handle errors, ++ best if you use xcalloc instead. */ ++typedef void *(*htab_alloc) (size_t, size_t); ++ ++/* We also need a free() routine. */ ++typedef void (*htab_free) (void *); ++ ++/* Memory allocation and deallocation; variants which take an extra ++ argument. */ ++typedef void *(*htab_alloc_with_arg) (void *, size_t, size_t); ++typedef void (*htab_free_with_arg) (void *, void *); ++ ++/* This macro defines reserved value for empty table entry. */ ++ ++#define HTAB_EMPTY_ENTRY ((PTR) 0) ++ ++/* This macro defines reserved value for table entry which contained ++ a deleted element. */ ++ ++#define HTAB_DELETED_ENTRY ((PTR) 1) ++ ++/* Hash tables are of the following type. The structure ++ (implementation) of this type is not needed for using the hash ++ tables. All work with hash table should be executed only through ++ functions mentioned below. The size of this structure is subject to ++ change. */ ++ ++struct htab { ++ /* Pointer to hash function. */ ++ htab_hash hash_f; ++ ++ /* Pointer to comparison function. 
*/ ++ htab_eq eq_f; ++ ++ /* Pointer to cleanup function. */ ++ htab_del del_f; ++ ++ /* Table itself. */ ++ void **entries; ++ ++ /* Current size (in entries) of the hash table. */ ++ size_t size; ++ ++ /* Current number of elements including also deleted elements. */ ++ size_t n_elements; ++ ++ /* Current number of deleted elements in the table. */ ++ size_t n_deleted; ++ ++ /* The following member is used for debugging. Its value is number ++ of all calls of `htab_find_slot' for the hash table. */ ++ unsigned int searches; ++ ++ /* The following member is used for debugging. Its value is number ++ of collisions fixed for time of work with the hash table. */ ++ unsigned int collisions; ++ ++ /* Pointers to allocate/free functions. */ ++ htab_alloc alloc_f; ++ htab_free free_f; ++ ++ /* Alternate allocate/free functions, which take an extra argument. */ ++ void *alloc_arg; ++ htab_alloc_with_arg alloc_with_arg_f; ++ htab_free_with_arg free_with_arg_f; ++ ++ /* Current size (in entries) of the hash table, as an index into the ++ table of primes. */ ++ unsigned int size_prime_index; ++}; ++ ++typedef struct htab *htab_t; ++ ++/* An enum saying whether we insert into the hash table or not. */ ++enum insert_option {NO_INSERT, INSERT}; ++ ++/* The prototypes of the package functions. */ ++ ++extern htab_t htab_create_alloc (size_t, htab_hash, ++ htab_eq, htab_del, ++ htab_alloc, htab_free); ++ ++extern htab_t htab_create_alloc_ex (size_t, htab_hash, ++ htab_eq, htab_del, ++ void *, htab_alloc_with_arg, ++ htab_free_with_arg); ++ ++extern htab_t htab_create_typed_alloc (size_t, htab_hash, htab_eq, htab_del, ++ htab_alloc, htab_alloc, htab_free); ++ ++/* Backward-compatibility functions. 
*/ ++extern htab_t htab_create (size_t, htab_hash, htab_eq, htab_del); ++extern htab_t htab_try_create (size_t, htab_hash, htab_eq, htab_del); ++ ++extern void htab_set_functions_ex (htab_t, htab_hash, ++ htab_eq, htab_del, ++ void *, htab_alloc_with_arg, ++ htab_free_with_arg); ++ ++extern void htab_delete (htab_t); ++extern void htab_empty (htab_t); ++ ++extern void * htab_find (htab_t, const void *); ++extern void ** htab_find_slot (htab_t, const void *, enum insert_option); ++extern void * htab_find_with_hash (htab_t, const void *, hashval_t); ++extern void ** htab_find_slot_with_hash (htab_t, const void *, ++ hashval_t, enum insert_option); ++extern void htab_clear_slot (htab_t, void **); ++extern void htab_remove_elt (htab_t, void *); ++extern void htab_remove_elt_with_hash (htab_t, void *, hashval_t); ++ ++extern void htab_traverse (htab_t, htab_trav, void *); ++extern void htab_traverse_noresize (htab_t, htab_trav, void *); ++ ++extern size_t htab_size (htab_t); ++extern size_t htab_elements (htab_t); ++extern double htab_collisions (htab_t); ++ ++/* A hash function for pointers. */ ++extern htab_hash htab_hash_pointer; ++ ++/* An equality function for pointers. */ ++extern htab_eq htab_eq_pointer; ++ ++/* A hash function for null-terminated strings. */ ++extern hashval_t htab_hash_string (const void *); ++ ++/* An iterative hash function for arbitrary data. */ ++extern hashval_t iterative_hash (const void *, size_t, hashval_t); ++/* Shorthand for hashing something with an intrinsic size. 
*/ ++#define iterative_hash_object(OB,INIT) iterative_hash (&OB, sizeof (OB), INIT) ++ ++#ifdef __cplusplus ++} ++#endif /* __cplusplus */ ++ ++#endif /* __HASHTAB_H */ diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/install-sh index 000000000,000000000..0b0fdcbba new file mode 100755 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/install-sh @@@ -1,0 -1,0 +1,501 @@@ ++#!/bin/sh ++# install - install a program, script, or datafile ++ ++scriptversion=2013-12-25.23; # UTC ++ ++# This originates from X11R5 (mit/util/scripts/install.sh), which was ++# later released in X11R6 (xc/config/util/install.sh) with the ++# following copyright and license. ++# ++# Copyright (C) 1994 X Consortium ++# ++# Permission is hereby granted, free of charge, to any person obtaining a copy ++# of this software and associated documentation files (the "Software"), to ++# deal in the Software without restriction, including without limitation the ++# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or ++# sell copies of the Software, and to permit persons to whom the Software is ++# furnished to do so, subject to the following conditions: ++# ++# The above copyright notice and this permission notice shall be included in ++# all copies or substantial portions of the Software. ++# ++# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ++# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ++# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ++# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN ++# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- ++# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
++# ++# Except as contained in this notice, the name of the X Consortium shall not ++# be used in advertising or otherwise to promote the sale, use or other deal- ++# ings in this Software without prior written authorization from the X Consor- ++# tium. ++# ++# ++# FSF changes to this file are in the public domain. ++# ++# Calling this script install-sh is preferred over install.sh, to prevent ++# 'make' implicit rules from creating a file called install from it ++# when there is no Makefile. ++# ++# This script is compatible with the BSD install script, but was written ++# from scratch. ++ ++tab=' ' ++nl=' ++' ++IFS=" $tab$nl" ++ ++# Set DOITPROG to "echo" to test this script. ++ ++doit=${DOITPROG-} ++doit_exec=${doit:-exec} ++ ++# Put in absolute file names if you don't have them in your path; ++# or use environment vars. ++ ++chgrpprog=${CHGRPPROG-chgrp} ++chmodprog=${CHMODPROG-chmod} ++chownprog=${CHOWNPROG-chown} ++cmpprog=${CMPPROG-cmp} ++cpprog=${CPPROG-cp} ++mkdirprog=${MKDIRPROG-mkdir} ++mvprog=${MVPROG-mv} ++rmprog=${RMPROG-rm} ++stripprog=${STRIPPROG-strip} ++ ++posix_mkdir= ++ ++# Desired mode of installed file. ++mode=0755 ++ ++chgrpcmd= ++chmodcmd=$chmodprog ++chowncmd= ++mvcmd=$mvprog ++rmcmd="$rmprog -f" ++stripcmd= ++ ++src= ++dst= ++dir_arg= ++dst_arg= ++ ++copy_on_change=false ++is_target_a_directory=possibly ++ ++usage="\ ++Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE ++ or: $0 [OPTION]... SRCFILES... DIRECTORY ++ or: $0 [OPTION]... -t DIRECTORY SRCFILES... ++ or: $0 [OPTION]... -d DIRECTORIES... ++ ++In the 1st form, copy SRCFILE to DSTFILE. ++In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. ++In the 4th, create DIRECTORIES. ++ ++Options: ++ --help display this help and exit. ++ --version display version info and exit. ++ ++ -c (ignored) ++ -C install only if different (preserve the last data modification time) ++ -d create directories instead of installing files. ++ -g GROUP $chgrpprog installed files to GROUP. 
++ -m MODE $chmodprog installed files to MODE. ++ -o USER $chownprog installed files to USER. ++ -s $stripprog installed files. ++ -t DIRECTORY install into DIRECTORY. ++ -T report an error if DSTFILE is a directory. ++ ++Environment variables override the default commands: ++ CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG ++ RMPROG STRIPPROG ++" ++ ++while test $# -ne 0; do ++ case $1 in ++ -c) ;; ++ ++ -C) copy_on_change=true;; ++ ++ -d) dir_arg=true;; ++ ++ -g) chgrpcmd="$chgrpprog $2" ++ shift;; ++ ++ --help) echo "$usage"; exit $?;; ++ ++ -m) mode=$2 ++ case $mode in ++ *' '* | *"$tab"* | *"$nl"* | *'*'* | *'?'* | *'['*) ++ echo "$0: invalid mode: $mode" >&2 ++ exit 1;; ++ esac ++ shift;; ++ ++ -o) chowncmd="$chownprog $2" ++ shift;; ++ ++ -s) stripcmd=$stripprog;; ++ ++ -t) ++ is_target_a_directory=always ++ dst_arg=$2 ++ # Protect names problematic for 'test' and other utilities. ++ case $dst_arg in ++ -* | [=\(\)!]) dst_arg=./$dst_arg;; ++ esac ++ shift;; ++ ++ -T) is_target_a_directory=never;; ++ ++ --version) echo "$0 $scriptversion"; exit $?;; ++ ++ --) shift ++ break;; ++ ++ -*) echo "$0: invalid option: $1" >&2 ++ exit 1;; ++ ++ *) break;; ++ esac ++ shift ++done ++ ++# We allow the use of options -d and -T together, by making -d ++# take the precedence; this is for compatibility with GNU install. ++ ++if test -n "$dir_arg"; then ++ if test -n "$dst_arg"; then ++ echo "$0: target directory not allowed when installing a directory." >&2 ++ exit 1 ++ fi ++fi ++ ++if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then ++ # When -d is used, all remaining arguments are directories to create. ++ # When -t is used, the destination is already specified. ++ # Otherwise, the last argument is the destination. Remove it from $@. ++ for arg ++ do ++ if test -n "$dst_arg"; then ++ # $@ is not empty: it contains at least $arg. 
++ set fnord "$@" "$dst_arg" ++ shift # fnord ++ fi ++ shift # arg ++ dst_arg=$arg ++ # Protect names problematic for 'test' and other utilities. ++ case $dst_arg in ++ -* | [=\(\)!]) dst_arg=./$dst_arg;; ++ esac ++ done ++fi ++ ++if test $# -eq 0; then ++ if test -z "$dir_arg"; then ++ echo "$0: no input file specified." >&2 ++ exit 1 ++ fi ++ # It's OK to call 'install-sh -d' without argument. ++ # This can happen when creating conditional directories. ++ exit 0 ++fi ++ ++if test -z "$dir_arg"; then ++ if test $# -gt 1 || test "$is_target_a_directory" = always; then ++ if test ! -d "$dst_arg"; then ++ echo "$0: $dst_arg: Is not a directory." >&2 ++ exit 1 ++ fi ++ fi ++fi ++ ++if test -z "$dir_arg"; then ++ do_exit='(exit $ret); exit $ret' ++ trap "ret=129; $do_exit" 1 ++ trap "ret=130; $do_exit" 2 ++ trap "ret=141; $do_exit" 13 ++ trap "ret=143; $do_exit" 15 ++ ++ # Set umask so as not to create temps with too-generous modes. ++ # However, 'strip' requires both read and write access to temps. ++ case $mode in ++ # Optimize common cases. ++ *644) cp_umask=133;; ++ *755) cp_umask=22;; ++ ++ *[0-7]) ++ if test -z "$stripcmd"; then ++ u_plus_rw= ++ else ++ u_plus_rw='% 200' ++ fi ++ cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;; ++ *) ++ if test -z "$stripcmd"; then ++ u_plus_rw= ++ else ++ u_plus_rw=,u+rw ++ fi ++ cp_umask=$mode$u_plus_rw;; ++ esac ++fi ++ ++for src ++do ++ # Protect names problematic for 'test' and other utilities. ++ case $src in ++ -* | [=\(\)!]) src=./$src;; ++ esac ++ ++ if test -n "$dir_arg"; then ++ dst=$src ++ dstdir=$dst ++ test -d "$dstdir" ++ dstdir_status=$? ++ else ++ ++ # Waiting for this to be detected by the "$cpprog $src $dsttmp" command ++ # might cause directories to be created, which would be especially bad ++ # if $src (and thus $dsttmp) contains '*'. ++ if test ! -f "$src" && test ! -d "$src"; then ++ echo "$0: $src does not exist." 
>&2 ++ exit 1 ++ fi ++ ++ if test -z "$dst_arg"; then ++ echo "$0: no destination specified." >&2 ++ exit 1 ++ fi ++ dst=$dst_arg ++ ++ # If destination is a directory, append the input filename; won't work ++ # if double slashes aren't ignored. ++ if test -d "$dst"; then ++ if test "$is_target_a_directory" = never; then ++ echo "$0: $dst_arg: Is a directory" >&2 ++ exit 1 ++ fi ++ dstdir=$dst ++ dst=$dstdir/`basename "$src"` ++ dstdir_status=0 ++ else ++ dstdir=`dirname "$dst"` ++ test -d "$dstdir" ++ dstdir_status=$? ++ fi ++ fi ++ ++ obsolete_mkdir_used=false ++ ++ if test $dstdir_status != 0; then ++ case $posix_mkdir in ++ '') ++ # Create intermediate dirs using mode 755 as modified by the umask. ++ # This is like FreeBSD 'install' as of 1997-10-28. ++ umask=`umask` ++ case $stripcmd.$umask in ++ # Optimize common cases. ++ *[2367][2367]) mkdir_umask=$umask;; ++ .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;; ++ ++ *[0-7]) ++ mkdir_umask=`expr $umask + 22 \ ++ - $umask % 100 % 40 + $umask % 20 \ ++ - $umask % 10 % 4 + $umask % 2 ++ `;; ++ *) mkdir_umask=$umask,go-w;; ++ esac ++ ++ # With -d, create the new directory with the user-specified mode. ++ # Otherwise, rely on $mkdir_umask. ++ if test -n "$dir_arg"; then ++ mkdir_mode=-m$mode ++ else ++ mkdir_mode= ++ fi ++ ++ posix_mkdir=false ++ case $umask in ++ *[123567][0-7][0-7]) ++ # POSIX mkdir -p sets u+wx bits regardless of umask, which ++ # is incompatible with FreeBSD 'install' when (umask & 300) != 0. ++ ;; ++ *) ++ tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$ ++ trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0 ++ ++ if (umask $mkdir_umask && ++ exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1 ++ then ++ if test -z "$dir_arg" || { ++ # Check for POSIX incompatibilities with -m. ++ # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or ++ # other-writable bit of parent directory when it shouldn't. ++ # FreeBSD 6.1 mkdir -m -p sets mode of existing directory. 
++ ls_ld_tmpdir=`ls -ld "$tmpdir"` ++ case $ls_ld_tmpdir in ++ d????-?r-*) different_mode=700;; ++ d????-?--*) different_mode=755;; ++ *) false;; ++ esac && ++ $mkdirprog -m$different_mode -p -- "$tmpdir" && { ++ ls_ld_tmpdir_1=`ls -ld "$tmpdir"` ++ test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1" ++ } ++ } ++ then posix_mkdir=: ++ fi ++ rmdir "$tmpdir/d" "$tmpdir" ++ else ++ # Remove any dirs left behind by ancient mkdir implementations. ++ rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null ++ fi ++ trap '' 0;; ++ esac;; ++ esac ++ ++ if ++ $posix_mkdir && ( ++ umask $mkdir_umask && ++ $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir" ++ ) ++ then : ++ else ++ ++ # The umask is ridiculous, or mkdir does not conform to POSIX, ++ # or it failed possibly due to a race condition. Create the ++ # directory the slow way, step by step, checking for races as we go. ++ ++ case $dstdir in ++ /*) prefix='/';; ++ [-=\(\)!]*) prefix='./';; ++ *) prefix='';; ++ esac ++ ++ oIFS=$IFS ++ IFS=/ ++ set -f ++ set fnord $dstdir ++ shift ++ set +f ++ IFS=$oIFS ++ ++ prefixes= ++ ++ for d ++ do ++ test X"$d" = X && continue ++ ++ prefix=$prefix$d ++ if test -d "$prefix"; then ++ prefixes= ++ else ++ if $posix_mkdir; then ++ (umask=$mkdir_umask && ++ $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break ++ # Don't fail if two instances are running concurrently. ++ test -d "$prefix" || exit 1 ++ else ++ case $prefix in ++ *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; ++ *) qprefix=$prefix;; ++ esac ++ prefixes="$prefixes '$qprefix'" ++ fi ++ fi ++ prefix=$prefix/ ++ done ++ ++ if test -n "$prefixes"; then ++ # Don't fail if two instances are running concurrently. 
++ (umask $mkdir_umask && ++ eval "\$doit_exec \$mkdirprog $prefixes") || ++ test -d "$dstdir" || exit 1 ++ obsolete_mkdir_used=true ++ fi ++ fi ++ fi ++ ++ if test -n "$dir_arg"; then ++ { test -z "$chowncmd" || $doit $chowncmd "$dst"; } && ++ { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } && ++ { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false || ++ test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1 ++ else ++ ++ # Make a couple of temp file names in the proper directory. ++ dsttmp=$dstdir/_inst.$$_ ++ rmtmp=$dstdir/_rm.$$_ ++ ++ # Trap to clean up those temp files at exit. ++ trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 ++ ++ # Copy the file name to the temp name. ++ (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") && ++ ++ # and set any options; do chmod last to preserve setuid bits. ++ # ++ # If any of these fail, we abort the whole thing. If we want to ++ # ignore errors from any of these, just make sure not to ignore ++ # errors from the above "$doit $cpprog $src $dsttmp" command. ++ # ++ { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } && ++ { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } && ++ { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } && ++ { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } && ++ ++ # If -C, don't bother to copy if it wouldn't change the file. ++ if $copy_on_change && ++ old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && ++ new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && ++ set -f && ++ set X $old && old=:$2:$4:$5:$6 && ++ set X $new && new=:$2:$4:$5:$6 && ++ set +f && ++ test "$old" = "$new" && ++ $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 ++ then ++ rm -f "$dsttmp" ++ else ++ # Rename the file to the real destination. ++ $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null || ++ ++ # The rename failed, perhaps because mv can't rename something else ++ # to itself, or perhaps because mv is so ancient that it does not ++ # support -f. 
++ { ++ # Now remove or move aside any old file at destination location. ++ # We try this two ways since rm can't unlink itself on some ++ # systems and the destination file might be busy for other ++ # reasons. In this case, the final cleanup might fail but the new ++ # file should still install successfully. ++ { ++ test ! -f "$dst" || ++ $doit $rmcmd -f "$dst" 2>/dev/null || ++ { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && ++ { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } ++ } || ++ { echo "$0: cannot unlink or rename $dst" >&2 ++ (exit 1); exit 1 ++ } ++ } && ++ ++ # Now rename the file to the real destination. ++ $doit $mvcmd "$dsttmp" "$dst" ++ } ++ fi || exit 1 ++ ++ trap '' 0 ++ fi ++done ++ ++# Local variables: ++# eval: (add-hook 'write-file-hooks 'time-stamp) ++# time-stamp-start: "scriptversion=" ++# time-stamp-format: "%:y-%02m-%02d.%02H" ++# time-stamp-time-zone: "UTC" ++# time-stamp-end: "; # UTC" ++# End: diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/internal.h index 000000000,000000000..aab4e2a00 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/internal.h @@@ -1,0 -1,0 +1,294 @@@ ++/* internal.h -- Internal header file for stack backtrace library. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. 
++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#ifndef BACKTRACE_INTERNAL_H ++#define BACKTRACE_INTERNAL_H ++ ++/* We assume that and "backtrace.h" have already been ++ included. */ ++ ++#ifndef GCC_VERSION ++# define GCC_VERSION (__GNUC__ * 1000 + __GNUC_MINOR__) ++#endif ++ ++#if (GCC_VERSION < 2007) ++# define __attribute__(x) ++#endif ++ ++#ifndef ATTRIBUTE_UNUSED ++# define ATTRIBUTE_UNUSED __attribute__ ((__unused__)) ++#endif ++ ++#ifndef ATTRIBUTE_MALLOC ++# if (GCC_VERSION >= 2096) ++# define ATTRIBUTE_MALLOC __attribute__ ((__malloc__)) ++# else ++# define ATTRIBUTE_MALLOC ++# endif ++#endif ++ ++#ifndef HAVE_SYNC_FUNCTIONS ++ ++/* Define out the sync functions. These should never be called if ++ they are not available. */ ++ ++#define __sync_bool_compare_and_swap(A, B, C) (abort(), 1) ++#define __sync_lock_test_and_set(A, B) (abort(), 0) ++#define __sync_lock_release(A) abort() ++ ++#endif /* !defined (HAVE_SYNC_FUNCTIONS) */ ++ ++#ifdef HAVE_ATOMIC_FUNCTIONS ++ ++/* We have the atomic builtin functions. 
*/ ++ ++#define backtrace_atomic_load_pointer(p) \ ++ __atomic_load_n ((p), __ATOMIC_ACQUIRE) ++#define backtrace_atomic_load_int(p) \ ++ __atomic_load_n ((p), __ATOMIC_ACQUIRE) ++#define backtrace_atomic_store_pointer(p, v) \ ++ __atomic_store_n ((p), (v), __ATOMIC_RELEASE) ++#define backtrace_atomic_store_size_t(p, v) \ ++ __atomic_store_n ((p), (v), __ATOMIC_RELEASE) ++#define backtrace_atomic_store_int(p, v) \ ++ __atomic_store_n ((p), (v), __ATOMIC_RELEASE) ++ ++#else /* !defined (HAVE_ATOMIC_FUNCTIONS) */ ++#ifdef HAVE_SYNC_FUNCTIONS ++ ++/* We have the sync functions but not the atomic functions. Define ++ the atomic ones in terms of the sync ones. */ ++ ++extern void *backtrace_atomic_load_pointer (void *); ++extern int backtrace_atomic_load_int (int *); ++extern void backtrace_atomic_store_pointer (void *, void *); ++extern void backtrace_atomic_store_size_t (size_t *, size_t); ++extern void backtrace_atomic_store_int (int *, int); ++ ++#else /* !defined (HAVE_SYNC_FUNCTIONS) */ ++ ++/* We have neither the sync nor the atomic functions. These will ++ never be called. */ ++ ++#define backtrace_atomic_load_pointer(p) (abort(), (void *) NULL) ++#define backtrace_atomic_load_int(p) (abort(), 0) ++#define backtrace_atomic_store_pointer(p, v) abort() ++#define backtrace_atomic_store_size_t(p, v) abort() ++#define backtrace_atomic_store_int(p, v) abort() ++ ++#endif /* !defined (HAVE_SYNC_FUNCTIONS) */ ++#endif /* !defined (HAVE_ATOMIC_FUNCTIONS) */ ++ ++/* The type of the function that collects file/line information. This ++ is like backtrace_pcinfo. */ ++ ++typedef int (*fileline) (struct backtrace_state *state, uintptr_t pc, ++ backtrace_full_callback callback, ++ backtrace_error_callback error_callback, void *data); ++ ++/* The type of the function that collects symbol information. This is ++ like backtrace_syminfo. 
*/ ++ ++typedef void (*syminfo) (struct backtrace_state *state, uintptr_t pc, ++ backtrace_syminfo_callback callback, ++ backtrace_error_callback error_callback, void *data); ++ ++/* What the backtrace state pointer points to. */ ++ ++struct backtrace_state ++{ ++ /* The name of the executable. */ ++ const char *filename; ++ /* Non-zero if threaded. */ ++ int threaded; ++ /* The master lock for fileline_fn, fileline_data, syminfo_fn, ++ syminfo_data, fileline_initialization_failed and everything the ++ data pointers point to. */ ++ void *lock; ++ /* The function that returns file/line information. */ ++ fileline fileline_fn; ++ /* The data to pass to FILELINE_FN. */ ++ void *fileline_data; ++ /* The function that returns symbol information. */ ++ syminfo syminfo_fn; ++ /* The data to pass to SYMINFO_FN. */ ++ void *syminfo_data; ++ /* Whether initializing the file/line information failed. */ ++ int fileline_initialization_failed; ++ /* The lock for the freelist. */ ++ int lock_alloc; ++ /* The freelist when using mmap. */ ++ struct backtrace_freelist_struct *freelist; ++}; ++ ++/* Open a file for reading. Returns -1 on error. If DOES_NOT_EXIST ++ is not NULL, *DOES_NOT_EXIST will be set to 0 normally and set to 1 ++ if the file does not exist. If the file does not exist and ++ DOES_NOT_EXIST is not NULL, the function will return -1 and will ++ not call ERROR_CALLBACK. On other errors, or if DOES_NOT_EXIST is ++ NULL, the function will call ERROR_CALLBACK before returning. */ ++extern int backtrace_open (const char *filename, ++ backtrace_error_callback error_callback, ++ void *data, ++ int *does_not_exist); ++ ++/* A view of the contents of a file. This supports mmap when ++ available. A view will remain in memory even after backtrace_close ++ is called on the file descriptor from which the view was ++ obtained. */ ++ ++struct backtrace_view ++{ ++ /* The data that the caller requested. */ ++ const void *data; ++ /* The base of the view. 
*/ ++ void *base; ++ /* The total length of the view. */ ++ size_t len; ++}; ++ ++/* Create a view of SIZE bytes from DESCRIPTOR at OFFSET. Store the ++ result in *VIEW. Returns 1 on success, 0 on error. */ ++extern int backtrace_get_view (struct backtrace_state *state, int descriptor, ++ off_t offset, size_t size, ++ backtrace_error_callback error_callback, ++ void *data, struct backtrace_view *view); ++ ++/* Release a view created by backtrace_get_view. */ ++extern void backtrace_release_view (struct backtrace_state *state, ++ struct backtrace_view *view, ++ backtrace_error_callback error_callback, ++ void *data); ++ ++/* Close a file opened by backtrace_open. Returns 1 on success, 0 on ++ error. */ ++ ++extern int backtrace_close (int descriptor, ++ backtrace_error_callback error_callback, ++ void *data); ++ ++/* Sort without using memory. */ ++ ++extern void backtrace_qsort (void *base, size_t count, size_t size, ++ int (*compar) (const void *, const void *)); ++ ++/* Allocate memory. This is like malloc. If ERROR_CALLBACK is NULL, ++ this does not report an error, it just returns NULL. */ ++ ++extern void *backtrace_alloc (struct backtrace_state *state, size_t size, ++ backtrace_error_callback error_callback, ++ void *data) ATTRIBUTE_MALLOC; ++ ++/* Free memory allocated by backtrace_alloc. If ERROR_CALLBACK is ++ NULL, this does not report an error. */ ++ ++extern void backtrace_free (struct backtrace_state *state, void *mem, ++ size_t size, ++ backtrace_error_callback error_callback, ++ void *data); ++ ++/* A growable vector of some struct. This is used for more efficient ++ allocation when we don't know the final size of some group of data ++ that we want to represent as an array. */ ++ ++struct backtrace_vector ++{ ++ /* The base of the vector. */ ++ void *base; ++ /* The number of bytes in the vector. */ ++ size_t size; ++ /* The number of bytes available at the current allocation. */ ++ size_t alc; ++}; ++ ++/* Grow VEC by SIZE bytes. 
Return a pointer to the newly allocated ++ bytes. Note that this may move the entire vector to a new memory ++ location. Returns NULL on failure. */ ++ ++extern void *backtrace_vector_grow (struct backtrace_state *state, size_t size, ++ backtrace_error_callback error_callback, ++ void *data, ++ struct backtrace_vector *vec); ++ ++/* Finish the current allocation on VEC. Prepare to start a new ++ allocation. The finished allocation will never be freed. Returns ++ a pointer to the base of the finished entries, or NULL on ++ failure. */ ++ ++extern void* backtrace_vector_finish (struct backtrace_state *state, ++ struct backtrace_vector *vec, ++ backtrace_error_callback error_callback, ++ void *data); ++ ++/* Release any extra space allocated for VEC. This may change ++ VEC->base. Returns 1 on success, 0 on failure. */ ++ ++extern int backtrace_vector_release (struct backtrace_state *state, ++ struct backtrace_vector *vec, ++ backtrace_error_callback error_callback, ++ void *data); ++ ++/* Read initial debug data from a descriptor, and set the ++ fileline_data, syminfo_fn, and syminfo_data fields of STATE. ++ Return the fileln_fn field in *FILELN_FN--this is done this way so ++ that the synchronization code is only implemented once. This is ++ called after the descriptor has first been opened. It will close ++ the descriptor if it is no longer needed. Returns 1 on success, 0 ++ on error. There will be multiple implementations of this function, ++ for different file formats. Each system will compile the ++ appropriate one. */ ++ ++extern int backtrace_initialize (struct backtrace_state *state, ++ int descriptor, ++ backtrace_error_callback error_callback, ++ void *data, ++ fileline *fileline_fn); ++ ++/* Add file/line information for a DWARF module. 
*/ ++ ++extern int backtrace_dwarf_add (struct backtrace_state *state, ++ uintptr_t base_address, ++ const unsigned char* dwarf_info, ++ size_t dwarf_info_size, ++ const unsigned char *dwarf_line, ++ size_t dwarf_line_size, ++ const unsigned char *dwarf_abbrev, ++ size_t dwarf_abbrev_size, ++ const unsigned char *dwarf_ranges, ++ size_t dwarf_range_size, ++ const unsigned char *dwarf_str, ++ size_t dwarf_str_size, ++ int is_bigendian, ++ backtrace_error_callback error_callback, ++ void *data, fileline *fileline_fn); ++ ++#endif diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/ltmain.sh index 000000000,000000000..9503ec85d new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/ltmain.sh @@@ -1,0 -1,0 +1,8636 @@@ ++# Generated from ltmain.m4sh. ++ ++# libtool (GNU libtool 1.3134 2009-11-29) 2.2.7a ++# Written by Gordon Matzigkeit , 1996 ++ ++# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, ++# 2007, 2008, 2009 Free Software Foundation, Inc. ++# This is free software; see the source for copying conditions. There is NO ++# warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. ++ ++# GNU Libtool is free software; you can redistribute it and/or modify ++# it under the terms of the GNU General Public License as published by ++# the Free Software Foundation; either version 2 of the License, or ++# (at your option) any later version. ++# ++# As a special exception to the GNU General Public License, ++# if you distribute this file as part of a program or library that ++# is built using GNU Libtool, you may include this file under the ++# same distribution terms that you use for the rest of that program. ++# ++# GNU Libtool is distributed in the hope that it will be useful, but ++# WITHOUT ANY WARRANTY; without even the implied warranty of ++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++# General Public License for more details. 
++# ++# You should have received a copy of the GNU General Public License ++# along with GNU Libtool; see the file COPYING. If not, a copy ++# can be downloaded from http://www.gnu.org/licenses/gpl.html, ++# or obtained by writing to the Free Software Foundation, Inc., ++# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. ++ ++# Usage: $progname [OPTION]... [MODE-ARG]... ++# ++# Provide generalized library-building support services. ++# ++# --config show all configuration variables ++# --debug enable verbose shell tracing ++# -n, --dry-run display commands without modifying any files ++# --features display basic configuration information and exit ++# --mode=MODE use operation mode MODE ++# --no-finish let install mode avoid finish commands ++# --preserve-dup-deps don't remove duplicate dependency libraries ++# --quiet, --silent don't print informational messages ++# --no-quiet, --no-silent ++# print informational messages (default) ++# --tag=TAG use configuration variables from tag TAG ++# -v, --verbose print more informational messages than default ++# --no-verbose don't print the extra informational messages ++# --version print version information ++# -h, --help, --help-all print short, long, or detailed help message ++# ++# MODE must be one of the following: ++# ++# clean remove files from the build directory ++# compile compile a source file into a libtool object ++# execute automatically set library path, then run a program ++# finish complete the installation of libtool libraries ++# install install libraries or executables ++# link create a library or an executable ++# uninstall remove libraries from an installed directory ++# ++# MODE-ARGS vary depending on the MODE. When passed as first option, ++# `--mode=MODE' may be abbreviated as `MODE' or a unique abbreviation of that. ++# Try `$progname --help --mode=MODE' for a more detailed description of MODE. 
++# ++# When reporting a bug, please describe a test case to reproduce it and ++# include the following information: ++# ++# host-triplet: $host ++# shell: $SHELL ++# compiler: $LTCC ++# compiler flags: $LTCFLAGS ++# linker: $LD (gnu? $with_gnu_ld) ++# $progname: (GNU libtool 1.3134 2009-11-29) 2.2.7a ++# automake: $automake_version ++# autoconf: $autoconf_version ++# ++# Report bugs to . ++ ++PROGRAM=libtool ++PACKAGE=libtool ++VERSION=2.2.7a ++TIMESTAMP=" 1.3134 2009-11-29" ++package_revision=1.3134 ++ ++# Be Bourne compatible ++if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then ++ emulate sh ++ NULLCMD=: ++ # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which ++ # is contrary to our usage. Disable this feature. ++ alias -g '${1+"$@"}'='"$@"' ++ setopt NO_GLOB_SUBST ++else ++ case `(set -o) 2>/dev/null` in *posix*) set -o posix;; esac ++fi ++BIN_SH=xpg4; export BIN_SH # for Tru64 ++DUALCASE=1; export DUALCASE # for MKS sh ++ ++# A function that is used when there is no print builtin or printf. ++func_fallback_echo () ++{ ++ eval 'cat <<_LTECHO_EOF ++$1 ++_LTECHO_EOF' ++} ++ ++# NLS nuisances: We save the old values to restore during execute mode. ++# Only set LANG and LC_ALL to C if already set. ++# These must not be set unconditionally because not all systems understand ++# e.g. LANG=C (notably SCO). ++lt_user_locale= ++lt_safe_locale= ++for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES ++do ++ eval "if test \"\${$lt_var+set}\" = set; then ++ save_$lt_var=\$$lt_var ++ $lt_var=C ++ export $lt_var ++ lt_user_locale=\"$lt_var=\\\$save_\$lt_var; \$lt_user_locale\" ++ lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\" ++ fi" ++done ++ ++$lt_unset CDPATH ++ ++ ++ ++ ++ ++ ++ ++# Work around backward compatibility issue on IRIX 6.5. 
On IRIX 6.4+, sh ++# is ksh but when the shell is invoked as "sh" and the current value of ++# the _XPG environment variable is not equal to 1 (one), the special ++# positional parameter $0, within a function call, is the name of the ++# function. ++progpath="$0" ++ ++ ++ ++: ${CP="cp -f"} ++: ${ECHO=$as_echo} ++: ${EGREP="/bin/grep -E"} ++: ${FGREP="/bin/grep -F"} ++: ${GREP="/bin/grep"} ++: ${LN_S="ln -s"} ++: ${MAKE="make"} ++: ${MKDIR="mkdir"} ++: ${MV="mv -f"} ++: ${RM="rm -f"} ++: ${SED="/mount/endor/wildenhu/local-x86_64/bin/sed"} ++: ${SHELL="${CONFIG_SHELL-/bin/sh}"} ++: ${Xsed="$SED -e 1s/^X//"} ++ ++# Global variables: ++EXIT_SUCCESS=0 ++EXIT_FAILURE=1 ++EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing. ++EXIT_SKIP=77 # $? = 77 is used to indicate a skipped test to automake. ++ ++exit_status=$EXIT_SUCCESS ++ ++# Make sure IFS has a sensible default ++lt_nl=' ++' ++IFS=" $lt_nl" ++ ++dirname="s,/[^/]*$,," ++basename="s,^.*/,," ++ ++# func_dirname_and_basename file append nondir_replacement ++# perform func_basename and func_dirname in a single function ++# call: ++# dirname: Compute the dirname of FILE. If nonempty, ++# add APPEND to the result, otherwise set result ++# to NONDIR_REPLACEMENT. ++# value returned in "$func_dirname_result" ++# basename: Compute filename of FILE. ++# value retuned in "$func_basename_result" ++# Implementation must be kept synchronized with func_dirname ++# and func_basename. For efficiency, we do not delegate to ++# those functions but instead duplicate the functionality here. ++func_dirname_and_basename () ++{ ++ # Extract subdirectory from the argument. ++ func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"` ++ if test "X$func_dirname_result" = "X${1}"; then ++ func_dirname_result="${3}" ++ else ++ func_dirname_result="$func_dirname_result${2}" ++ fi ++ func_basename_result=`$ECHO "${1}" | $SED -e "$basename"` ++} ++ ++# Generated shell functions inserted here. 
++ ++# These SED scripts presuppose an absolute path with a trailing slash. ++pathcar='s,^/\([^/]*\).*$,\1,' ++pathcdr='s,^/[^/]*,,' ++removedotparts=':dotsl ++ s@/\./@/@g ++ t dotsl ++ s,/\.$,/,' ++collapseslashes='s@/\{1,\}@/@g' ++finalslash='s,/*$,/,' ++ ++# func_normal_abspath PATH ++# Remove doubled-up and trailing slashes, "." path components, ++# and cancel out any ".." path components in PATH after making ++# it an absolute path. ++# value returned in "$func_normal_abspath_result" ++func_normal_abspath () ++{ ++ # Start from root dir and reassemble the path. ++ func_normal_abspath_result= ++ func_normal_abspath_tpath=$1 ++ func_normal_abspath_altnamespace= ++ case $func_normal_abspath_tpath in ++ "") ++ # Empty path, that just means $cwd. ++ func_stripname '' '/' "`pwd`" ++ func_normal_abspath_result=$func_stripname_result ++ return ++ ;; ++ # The next three entries are used to spot a run of precisely ++ # two leading slashes without using negated character classes; ++ # we take advantage of case's first-match behaviour. ++ ///*) ++ # Unusual form of absolute path, do nothing. ++ ;; ++ //*) ++ # Not necessarily an ordinary path; POSIX reserves leading '//' ++ # and for example Cygwin uses it to access remote file shares ++ # over CIFS/SMB, so we conserve a leading double slash if found. ++ func_normal_abspath_altnamespace=/ ++ ;; ++ /*) ++ # Absolute path, do nothing. ++ ;; ++ *) ++ # Relative path, prepend $cwd. ++ func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath ++ ;; ++ esac ++ # Cancel out all the simple stuff to save iterations. We also want ++ # the path to end with a slash for ease of parsing, so make sure ++ # there is one (and only one) here. ++ func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ ++ -e "$removedotparts" -e "$collapseslashes" -e "$finalslash"` ++ while :; do ++ # Processed it all yet? ++ if test "$func_normal_abspath_tpath" = / ; then ++ # If we ascended to the root using ".." 
the result may be empty now. ++ if test -z "$func_normal_abspath_result" ; then ++ func_normal_abspath_result=/ ++ fi ++ break ++ fi ++ func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \ ++ -e "$pathcar"` ++ func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \ ++ -e "$pathcdr"` ++ # Figure out what to do with it ++ case $func_normal_abspath_tcomponent in ++ "") ++ # Trailing empty path component, ignore it. ++ ;; ++ ..) ++ # Parent dir; strip last assembled component from result. ++ func_dirname "$func_normal_abspath_result" ++ func_normal_abspath_result=$func_dirname_result ++ ;; ++ *) ++ # Actual path component, append it. ++ func_normal_abspath_result=$func_normal_abspath_result/$func_normal_abspath_tcomponent ++ ;; ++ esac ++ done ++ # Restore leading double-slash if one was found on entry. ++ func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result ++} ++ ++# func_relative_path SRCDIR DSTDIR ++# generates a relative path from SRCDIR to DSTDIR, with a trailing ++# slash if non-empty, suitable for immediately appending a filename ++# without needing to append a separator. ++# value returned in "$func_relative_path_result" ++func_relative_path () ++{ ++ func_relative_path_result= ++ func_normal_abspath "$1" ++ func_relative_path_tlibdir=$func_normal_abspath_result ++ func_normal_abspath "$2" ++ func_relative_path_tbindir=$func_normal_abspath_result ++ ++ # Ascend the tree starting from libdir ++ while :; do ++ # check if we have found a prefix of bindir ++ case $func_relative_path_tbindir in ++ $func_relative_path_tlibdir) ++ # found an exact match ++ func_relative_path_tcancelled= ++ break ++ ;; ++ $func_relative_path_tlibdir*) ++ # found a matching prefix ++ func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir" ++ func_relative_path_tcancelled=$func_stripname_result ++ if test -z "$func_relative_path_result"; then ++ func_relative_path_result=. 
++ fi ++ break ++ ;; ++ *) ++ func_dirname $func_relative_path_tlibdir ++ func_relative_path_tlibdir=${func_dirname_result} ++ if test "x$func_relative_path_tlibdir" = x ; then ++ # Have to descend all the way to the root! ++ func_relative_path_result=../$func_relative_path_result ++ func_relative_path_tcancelled=$func_relative_path_tbindir ++ break ++ fi ++ func_relative_path_result=../$func_relative_path_result ++ ;; ++ esac ++ done ++ ++ # Now calculate path; take care to avoid doubling-up slashes. ++ func_stripname '' '/' "$func_relative_path_result" ++ func_relative_path_result=$func_stripname_result ++ func_stripname '/' '/' "$func_relative_path_tcancelled" ++ if test "x$func_stripname_result" != x ; then ++ func_relative_path_result=${func_relative_path_result}/${func_stripname_result} ++ fi ++ ++ # Normalisation. If bindir is libdir, return empty string, ++ # else relative path ending with a slash; either way, target ++ # file name can be directly appended. ++ if test ! -z "$func_relative_path_result"; then ++ func_stripname './' '' "$func_relative_path_result/" ++ func_relative_path_result=$func_stripname_result ++ fi ++} ++ ++# The name of this program: ++func_dirname_and_basename "$progpath" ++progname=$func_basename_result ++ ++# Make sure we have an absolute path for reexecution: ++case $progpath in ++ [\\/]*|[A-Za-z]:\\*) ;; ++ *[\\/]*) ++ progdir=$func_dirname_result ++ progdir=`cd "$progdir" && pwd` ++ progpath="$progdir/$progname" ++ ;; ++ *) ++ save_IFS="$IFS" ++ IFS=: ++ for progdir in $PATH; do ++ IFS="$save_IFS" ++ test -x "$progdir/$progname" && break ++ done ++ IFS="$save_IFS" ++ test -n "$progdir" || progdir=`pwd` ++ progpath="$progdir/$progname" ++ ;; ++esac ++ ++# Sed substitution that helps us do robust quoting. It backslashifies ++# metacharacters that are still active within double-quoted strings. ++Xsed="${SED}"' -e 1s/^X//' ++sed_quote_subst='s/\([`"$\\]\)/\\\1/g' ++ ++# Same as above, but do not quote variable references. 
++double_quote_subst='s/\(["`\\]\)/\\\1/g' ++ ++# Re-`\' parameter expansions in output of double_quote_subst that were ++# `\'-ed in input to the same. If an odd number of `\' preceded a '$' ++# in input to double_quote_subst, that '$' was protected from expansion. ++# Since each input `\' is now two `\'s, look for any number of runs of ++# four `\'s followed by two `\'s and then a '$'. `\' that '$'. ++bs='\\' ++bs2='\\\\' ++bs4='\\\\\\\\' ++dollar='\$' ++sed_double_backslash="\ ++ s/$bs4/&\\ ++/g ++ s/^$bs2$dollar/$bs&/ ++ s/\\([^$bs]\\)$bs2$dollar/\\1$bs2$bs$dollar/g ++ s/\n//g" ++ ++# Standard options: ++opt_dry_run=false ++opt_help=false ++opt_quiet=false ++opt_verbose=false ++opt_warning=: ++ ++# func_echo arg... ++# Echo program name prefixed message, along with the current mode ++# name if it has been set yet. ++func_echo () ++{ ++ $ECHO "$progname${mode+: }$mode: $*" ++} ++ ++# func_verbose arg... ++# Echo program name prefixed message in verbose mode only. ++func_verbose () ++{ ++ $opt_verbose && func_echo ${1+"$@"} ++ ++ # A bug in bash halts the script if the last line of a function ++ # fails when set -e is in force, so we need another command to ++ # work around that: ++ : ++} ++ ++# func_echo_all arg... ++# Invoke $ECHO with all args, space-separated. ++func_echo_all () ++{ ++ $ECHO "$*" ++} ++ ++# func_error arg... ++# Echo program name prefixed message to standard error. ++func_error () ++{ ++ $ECHO "$progname${mode+: }$mode: "${1+"$@"} 1>&2 ++} ++ ++# func_warning arg... ++# Echo program name prefixed warning message to standard error. ++func_warning () ++{ ++ $opt_warning && $ECHO "$progname${mode+: }$mode: warning: "${1+"$@"} 1>&2 ++ ++ # bash bug again: ++ : ++} ++ ++# func_fatal_error arg... ++# Echo program name prefixed message to standard error, and exit. ++func_fatal_error () ++{ ++ func_error ${1+"$@"} ++ exit $EXIT_FAILURE ++} ++ ++# func_fatal_help arg... 
++# Echo program name prefixed message to standard error, followed by ++# a help hint, and exit. ++func_fatal_help () ++{ ++ func_error ${1+"$@"} ++ func_fatal_error "$help" ++} ++help="Try \`$progname --help' for more information." ## default ++ ++ ++# func_grep expression filename ++# Check whether EXPRESSION matches any line of FILENAME, without output. ++func_grep () ++{ ++ $GREP "$1" "$2" >/dev/null 2>&1 ++} ++ ++ ++# func_mkdir_p directory-path ++# Make sure the entire path to DIRECTORY-PATH is available. ++func_mkdir_p () ++{ ++ my_directory_path="$1" ++ my_dir_list= ++ ++ if test -n "$my_directory_path" && test "$opt_dry_run" != ":"; then ++ ++ # Protect directory names starting with `-' ++ case $my_directory_path in ++ -*) my_directory_path="./$my_directory_path" ;; ++ esac ++ ++ # While some portion of DIR does not yet exist... ++ while test ! -d "$my_directory_path"; do ++ # ...make a list in topmost first order. Use a colon delimited ++ # list incase some portion of path contains whitespace. ++ my_dir_list="$my_directory_path:$my_dir_list" ++ ++ # If the last portion added has no slash in it, the list is done ++ case $my_directory_path in */*) ;; *) break ;; esac ++ ++ # ...otherwise throw away the child directory and loop ++ my_directory_path=`$ECHO "$my_directory_path" | $SED -e "$dirname"` ++ done ++ my_dir_list=`$ECHO "$my_dir_list" | $SED 's,:*$,,'` ++ ++ save_mkdir_p_IFS="$IFS"; IFS=':' ++ for my_dir in $my_dir_list; do ++ IFS="$save_mkdir_p_IFS" ++ # mkdir can fail with a `File exist' error if two processes ++ # try to create one of the directories concurrently. Don't ++ # stop in that case! ++ $MKDIR "$my_dir" 2>/dev/null || : ++ done ++ IFS="$save_mkdir_p_IFS" ++ ++ # Bail out if we (or some other process) failed to create a directory. 
++ test -d "$my_directory_path" || \ ++ func_fatal_error "Failed to create \`$1'" ++ fi ++} ++ ++ ++# func_mktempdir [string] ++# Make a temporary directory that won't clash with other running ++# libtool processes, and avoids race conditions if possible. If ++# given, STRING is the basename for that directory. ++func_mktempdir () ++{ ++ my_template="${TMPDIR-/tmp}/${1-$progname}" ++ ++ if test "$opt_dry_run" = ":"; then ++ # Return a directory name, but don't create it in dry-run mode ++ my_tmpdir="${my_template}-$$" ++ else ++ ++ # If mktemp works, use that first and foremost ++ my_tmpdir=`mktemp -d "${my_template}-XXXXXXXX" 2>/dev/null` ++ ++ if test ! -d "$my_tmpdir"; then ++ # Failing that, at least try and use $RANDOM to avoid a race ++ my_tmpdir="${my_template}-${RANDOM-0}$$" ++ ++ save_mktempdir_umask=`umask` ++ umask 0077 ++ $MKDIR "$my_tmpdir" ++ umask $save_mktempdir_umask ++ fi ++ ++ # If we're not in dry-run mode, bomb out on failure ++ test -d "$my_tmpdir" || \ ++ func_fatal_error "cannot create temporary directory \`$my_tmpdir'" ++ fi ++ ++ $ECHO "$my_tmpdir" ++} ++ ++ ++# func_quote_for_eval arg ++# Aesthetically quote ARG to be evaled later. ++# This function returns two values: FUNC_QUOTE_FOR_EVAL_RESULT ++# is double-quoted, suitable for a subsequent eval, whereas ++# FUNC_QUOTE_FOR_EVAL_UNQUOTED_RESULT has merely all characters ++# which are still active within double quotes backslashified. ++func_quote_for_eval () ++{ ++ case $1 in ++ *[\\\`\"\$]*) ++ func_quote_for_eval_unquoted_result=`$ECHO "$1" | $SED "$sed_quote_subst"` ;; ++ *) ++ func_quote_for_eval_unquoted_result="$1" ;; ++ esac ++ ++ case $func_quote_for_eval_unquoted_result in ++ # Double-quote args containing shell metacharacters to delay ++ # word splitting, command substitution and and variable ++ # expansion for a subsequent eval. ++ # Many Bourne shells cannot handle close brackets correctly ++ # in scan sets, so we specify it separately. 
++ *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") ++ func_quote_for_eval_result="\"$func_quote_for_eval_unquoted_result\"" ++ ;; ++ *) ++ func_quote_for_eval_result="$func_quote_for_eval_unquoted_result" ++ esac ++} ++ ++ ++# func_quote_for_expand arg ++# Aesthetically quote ARG to be evaled later; same as above, ++# but do not quote variable references. ++func_quote_for_expand () ++{ ++ case $1 in ++ *[\\\`\"]*) ++ my_arg=`$ECHO "$1" | $SED \ ++ -e "$double_quote_subst" -e "$sed_double_backslash"` ;; ++ *) ++ my_arg="$1" ;; ++ esac ++ ++ case $my_arg in ++ # Double-quote args containing shell metacharacters to delay ++ # word splitting and command substitution for a subsequent eval. ++ # Many Bourne shells cannot handle close brackets correctly ++ # in scan sets, so we specify it separately. ++ *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"") ++ my_arg="\"$my_arg\"" ++ ;; ++ esac ++ ++ func_quote_for_expand_result="$my_arg" ++} ++ ++ ++# func_show_eval cmd [fail_exp] ++# Unless opt_silent is true, then output CMD. Then, if opt_dryrun is ++# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP ++# is given, then evaluate it. ++func_show_eval () ++{ ++ my_cmd="$1" ++ my_fail_exp="${2-:}" ++ ++ ${opt_silent-false} || { ++ func_quote_for_expand "$my_cmd" ++ eval "func_echo $func_quote_for_expand_result" ++ } ++ ++ if ${opt_dry_run-false}; then :; else ++ eval "$my_cmd" ++ my_status=$? ++ if test "$my_status" -eq 0; then :; else ++ eval "(exit $my_status); $my_fail_exp" ++ fi ++ fi ++} ++ ++ ++# func_show_eval_locale cmd [fail_exp] ++# Unless opt_silent is true, then output CMD. Then, if opt_dryrun is ++# not true, evaluate CMD. If the evaluation of CMD fails, and FAIL_EXP ++# is given, then evaluate it. Use the saved locale for evaluation. 
++func_show_eval_locale () ++{ ++ my_cmd="$1" ++ my_fail_exp="${2-:}" ++ ++ ${opt_silent-false} || { ++ func_quote_for_expand "$my_cmd" ++ eval "func_echo $func_quote_for_expand_result" ++ } ++ ++ if ${opt_dry_run-false}; then :; else ++ eval "$lt_user_locale ++ $my_cmd" ++ my_status=$? ++ eval "$lt_safe_locale" ++ if test "$my_status" -eq 0; then :; else ++ eval "(exit $my_status); $my_fail_exp" ++ fi ++ fi ++} ++ ++ ++ ++ ++ ++# func_version ++# Echo version message to standard output and exit. ++func_version () ++{ ++ $SED -n '/(C)/!b go ++ :more ++ /\./!{ ++ N ++ s/\n# // ++ b more ++ } ++ :go ++ /^# '$PROGRAM' (GNU /,/# warranty; / { ++ s/^# // ++ s/^# *$// ++ s/\((C)\)[ 0-9,-]*\( [1-9][0-9]*\)/\1\2/ ++ p ++ }' < "$progpath" ++ exit $? ++} ++ ++# func_usage ++# Echo short help message to standard output and exit. ++func_usage () ++{ ++ $SED -n '/^# Usage:/,/^# *-h/ { ++ s/^# // ++ s/^# *$// ++ s/\$progname/'$progname'/ ++ p ++ }' < "$progpath" ++ echo ++ $ECHO "run \`$progname --help | more' for full usage" ++ exit $? ++} ++ ++# func_help [NOEXIT] ++# Echo long help message to standard output and exit, ++# unless 'noexit' is passed as argument. ++func_help () ++{ ++ $SED -n '/^# Usage:/,/# Report bugs to/ { ++ s/^# // ++ s/^# *$// ++ s*\$progname*'$progname'* ++ s*\$host*'"$host"'* ++ s*\$SHELL*'"$SHELL"'* ++ s*\$LTCC*'"$LTCC"'* ++ s*\$LTCFLAGS*'"$LTCFLAGS"'* ++ s*\$LD*'"$LD"'* ++ s/\$with_gnu_ld/'"$with_gnu_ld"'/ ++ s/\$automake_version/'"`(automake --version) 2>/dev/null |$SED 1q`"'/ ++ s/\$autoconf_version/'"`(autoconf --version) 2>/dev/null |$SED 1q`"'/ ++ p ++ }' < "$progpath" ++ ret=$? ++ if test -z "$1"; then ++ exit $ret ++ fi ++} ++ ++# func_missing_arg argname ++# Echo program name prefixed message to standard error and set global ++# exit_cmd. 
++func_missing_arg () ++{ ++ func_error "missing argument for $1" ++ exit_cmd=exit ++} ++ ++exit_cmd=: ++ ++ ++ ++ ++ ++ ++magic="%%%MAGIC variable%%%" ++magic_exe="%%%MAGIC EXE variable%%%" ++ ++# Global variables. ++# $mode is unset ++nonopt= ++execute_dlfiles= ++preserve_args= ++lo2o="s/\\.lo\$/.${objext}/" ++o2lo="s/\\.${objext}\$/.lo/" ++extracted_archives= ++extracted_serial=0 ++ ++opt_dry_run=false ++opt_finish=: ++opt_duplicate_deps=false ++opt_silent=false ++opt_debug=: ++ ++# If this variable is set in any of the actions, the command in it ++# will be execed at the end. This prevents here-documents from being ++# left over by shells. ++exec_cmd= ++ ++# func_fatal_configuration arg... ++# Echo program name prefixed message to standard error, followed by ++# a configuration failure hint, and exit. ++func_fatal_configuration () ++{ ++ func_error ${1+"$@"} ++ func_error "See the $PACKAGE documentation for more information." ++ func_fatal_error "Fatal configuration error." ++} ++ ++ ++# func_config ++# Display the configuration for all the tags in this script. ++func_config () ++{ ++ re_begincf='^# ### BEGIN LIBTOOL' ++ re_endcf='^# ### END LIBTOOL' ++ ++ # Default configuration. ++ $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath" ++ ++ # Now print the configurations for the tags. ++ for tagname in $taglist; do ++ $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath" ++ done ++ ++ exit $? ++} ++ ++# func_features ++# Display the features supported by this script. ++func_features () ++{ ++ echo "host: $host" ++ if test "$build_libtool_libs" = yes; then ++ echo "enable shared libraries" ++ else ++ echo "disable shared libraries" ++ fi ++ if test "$build_old_libs" = yes; then ++ echo "enable static libraries" ++ else ++ echo "disable static libraries" ++ fi ++ ++ exit $? 
++} ++ ++# func_enable_tag tagname ++# Verify that TAGNAME is valid, and either flag an error and exit, or ++# enable the TAGNAME tag. We also add TAGNAME to the global $taglist ++# variable here. ++func_enable_tag () ++{ ++ # Global variable: ++ tagname="$1" ++ ++ re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$" ++ re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$" ++ sed_extractcf="/$re_begincf/,/$re_endcf/p" ++ ++ # Validate tagname. ++ case $tagname in ++ *[!-_A-Za-z0-9,/]*) ++ func_fatal_error "invalid tag name: $tagname" ++ ;; ++ esac ++ ++ # Don't test for the "default" C tag, as we know it's ++ # there but not specially marked. ++ case $tagname in ++ CC) ;; ++ *) ++ if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then ++ taglist="$taglist $tagname" ++ ++ # Evaluate the configuration. Be careful to quote the path ++ # and the sed script, to avoid splitting on whitespace, but ++ # also don't use non-portable quotes within backquotes within ++ # quotes we have to do it in 2 steps: ++ extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"` ++ eval "$extractedcf" ++ else ++ func_error "ignoring unknown tag $tagname" ++ fi ++ ;; ++ esac ++} ++ ++# Parse options once, thoroughly. This comes as soon as possible in ++# the script to make things like `libtool --version' happen quickly. 
++{ ++ ++ # Shorthand for --mode=foo, only valid as the first argument ++ case $1 in ++ clean|clea|cle|cl) ++ shift; set dummy --mode clean ${1+"$@"}; shift ++ ;; ++ compile|compil|compi|comp|com|co|c) ++ shift; set dummy --mode compile ${1+"$@"}; shift ++ ;; ++ execute|execut|execu|exec|exe|ex|e) ++ shift; set dummy --mode execute ${1+"$@"}; shift ++ ;; ++ finish|finis|fini|fin|fi|f) ++ shift; set dummy --mode finish ${1+"$@"}; shift ++ ;; ++ install|instal|insta|inst|ins|in|i) ++ shift; set dummy --mode install ${1+"$@"}; shift ++ ;; ++ link|lin|li|l) ++ shift; set dummy --mode link ${1+"$@"}; shift ++ ;; ++ uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) ++ shift; set dummy --mode uninstall ${1+"$@"}; shift ++ ;; ++ esac ++ ++ # Parse non-mode specific arguments: ++ while test "$#" -gt 0; do ++ opt="$1" ++ shift ++ ++ case $opt in ++ --config) func_config ;; ++ ++ --debug) preserve_args="$preserve_args $opt" ++ func_echo "enabling shell trace mode" ++ opt_debug='set -x' ++ $opt_debug ++ ;; ++ ++ -dlopen) test "$#" -eq 0 && func_missing_arg "$opt" && break ++ execute_dlfiles="$execute_dlfiles $1" ++ shift ++ ;; ++ ++ --dry-run | -n) opt_dry_run=: ;; ++ --features) func_features ;; ++ --finish) mode="finish" ;; ++ --no-finish) opt_finish=false ;; ++ ++ --mode) test "$#" -eq 0 && func_missing_arg "$opt" && break ++ case $1 in ++ # Valid mode arguments: ++ clean) ;; ++ compile) ;; ++ execute) ;; ++ finish) ;; ++ install) ;; ++ link) ;; ++ relink) ;; ++ uninstall) ;; ++ ++ # Catch anything else as an error ++ *) func_error "invalid argument for $opt" ++ exit_cmd=exit ++ break ++ ;; ++ esac ++ ++ mode="$1" ++ shift ++ ;; ++ ++ --preserve-dup-deps) ++ opt_duplicate_deps=: ;; ++ ++ --quiet|--silent) preserve_args="$preserve_args $opt" ++ opt_silent=: ++ opt_verbose=false ++ ;; ++ ++ --no-quiet|--no-silent) ++ preserve_args="$preserve_args $opt" ++ opt_silent=false ++ ;; ++ ++ --verbose| -v) preserve_args="$preserve_args $opt" ++ opt_silent=false ++ opt_verbose=: 
++ ;; ++ ++ --no-verbose) preserve_args="$preserve_args $opt" ++ opt_verbose=false ++ ;; ++ ++ --tag) test "$#" -eq 0 && func_missing_arg "$opt" && break ++ preserve_args="$preserve_args $opt $1" ++ func_enable_tag "$1" # tagname is set here ++ shift ++ ;; ++ ++ # Separate optargs to long options: ++ -dlopen=*|--mode=*|--tag=*) ++ func_opt_split "$opt" ++ set dummy "$func_opt_split_opt" "$func_opt_split_arg" ${1+"$@"} ++ shift ++ ;; ++ ++ -\?|-h) func_usage ;; ++ --help) opt_help=: ;; ++ --help-all) opt_help=': help-all' ;; ++ --version) func_version ;; ++ ++ -*) func_fatal_help "unrecognized option \`$opt'" ;; ++ ++ *) nonopt="$opt" ++ break ++ ;; ++ esac ++ done ++ ++ ++ case $host in ++ *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* ) ++ # don't eliminate duplications in $postdeps and $predeps ++ opt_duplicate_compiler_generated_deps=: ++ ;; ++ *) ++ opt_duplicate_compiler_generated_deps=$opt_duplicate_deps ++ ;; ++ esac ++ ++ # Having warned about all mis-specified options, bail out if ++ # anything was wrong. ++ $exit_cmd $EXIT_FAILURE ++} ++ ++# func_check_version_match ++# Ensure that we are using m4 macros, and libtool script from the same ++# release of libtool. ++func_check_version_match () ++{ ++ if test "$package_revision" != "$macro_revision"; then ++ if test "$VERSION" != "$macro_version"; then ++ if test -z "$macro_version"; then ++ cat >&2 <<_LT_EOF ++$progname: Version mismatch error. This is $PACKAGE $VERSION, but the ++$progname: definition of this LT_INIT comes from an older release. ++$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION ++$progname: and run autoconf again. ++_LT_EOF ++ else ++ cat >&2 <<_LT_EOF ++$progname: Version mismatch error. This is $PACKAGE $VERSION, but the ++$progname: definition of this LT_INIT comes from $PACKAGE $macro_version. ++$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION ++$progname: and run autoconf again. 
++_LT_EOF ++ fi ++ else ++ cat >&2 <<_LT_EOF ++$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, ++$progname: but the definition of this LT_INIT comes from revision $macro_revision. ++$progname: You should recreate aclocal.m4 with macros from revision $package_revision ++$progname: of $PACKAGE $VERSION and run autoconf again. ++_LT_EOF ++ fi ++ ++ exit $EXIT_MISMATCH ++ fi ++} ++ ++ ++## ----------- ## ++## Main. ## ++## ----------- ## ++ ++$opt_help || { ++ # Sanity checks first: ++ func_check_version_match ++ ++ if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then ++ func_fatal_configuration "not configured to build any kind of library" ++ fi ++ ++ test -z "$mode" && func_fatal_error "error: you must specify a MODE." ++ ++ ++ # Darwin sucks ++ eval "std_shrext=\"$shrext_cmds\"" ++ ++ ++ # Only execute mode is allowed to have -dlopen flags. ++ if test -n "$execute_dlfiles" && test "$mode" != execute; then ++ func_error "unrecognized option \`-dlopen'" ++ $ECHO "$help" 1>&2 ++ exit $EXIT_FAILURE ++ fi ++ ++ # Change the help message to a mode-specific one. ++ generic_help="$help" ++ help="Try \`$progname --help --mode=$mode' for more information." ++} ++ ++ ++# func_lalib_p file ++# True iff FILE is a libtool `.la' library or `.lo' object file. ++# This function is only a basic sanity check; it will hardly flush out ++# determined imposters. ++func_lalib_p () ++{ ++ test -f "$1" && ++ $SED -e 4q "$1" 2>/dev/null \ ++ | $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1 ++} ++ ++# func_lalib_unsafe_p file ++# True iff FILE is a libtool `.la' library or `.lo' object file. ++# This function implements the same check as func_lalib_p without ++# resorting to external programs. To this end, it redirects stdin and ++# closes it afterwards, without saving the original file descriptor. ++# As a safety measure, use it only where a negative result would be ++# fatal anyway. Works if `file' does not exist. 
++func_lalib_unsafe_p () ++{ ++ lalib_p=no ++ if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then ++ for lalib_p_l in 1 2 3 4 ++ do ++ read lalib_p_line ++ case "$lalib_p_line" in ++ \#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;; ++ esac ++ done ++ exec 0<&5 5<&- ++ fi ++ test "$lalib_p" = yes ++} ++ ++# func_ltwrapper_script_p file ++# True iff FILE is a libtool wrapper script ++# This function is only a basic sanity check; it will hardly flush out ++# determined imposters. ++func_ltwrapper_script_p () ++{ ++ func_lalib_p "$1" ++} ++ ++# func_ltwrapper_executable_p file ++# True iff FILE is a libtool wrapper executable ++# This function is only a basic sanity check; it will hardly flush out ++# determined imposters. ++func_ltwrapper_executable_p () ++{ ++ func_ltwrapper_exec_suffix= ++ case $1 in ++ *.exe) ;; ++ *) func_ltwrapper_exec_suffix=.exe ;; ++ esac ++ $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1 ++} ++ ++# func_ltwrapper_scriptname file ++# Assumes file is an ltwrapper_executable ++# uses $file to determine the appropriate filename for a ++# temporary ltwrapper_script. ++func_ltwrapper_scriptname () ++{ ++ func_ltwrapper_scriptname_result="" ++ if func_ltwrapper_executable_p "$1"; then ++ func_dirname_and_basename "$1" "" "." ++ func_stripname '' '.exe' "$func_basename_result" ++ func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" ++ fi ++} ++ ++# func_ltwrapper_p file ++# True iff FILE is a libtool wrapper script or wrapper executable ++# This function is only a basic sanity check; it will hardly flush out ++# determined imposters. ++func_ltwrapper_p () ++{ ++ func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1" ++} ++ ++ ++# func_execute_cmds commands fail_cmd ++# Execute tilde-delimited COMMANDS. ++# If FAIL_CMD is given, eval that upon failure. ++# FAIL_CMD may read-access the current command in variable CMD! 
++func_execute_cmds () ++{ ++ $opt_debug ++ save_ifs=$IFS; IFS='~' ++ for cmd in $1; do ++ IFS=$save_ifs ++ eval "cmd=\"$cmd\"" ++ func_show_eval "$cmd" "${2-:}" ++ done ++ IFS=$save_ifs ++} ++ ++ ++# func_source file ++# Source FILE, adding directory component if necessary. ++# Note that it is not necessary on cygwin/mingw to append a dot to ++# FILE even if both FILE and FILE.exe exist: automatic-append-.exe ++# behavior happens only for exec(3), not for open(2)! Also, sourcing ++# `FILE.' does not work on cygwin managed mounts. ++func_source () ++{ ++ $opt_debug ++ case $1 in ++ */* | *\\*) . "$1" ;; ++ *) . "./$1" ;; ++ esac ++} ++ ++ ++# func_infer_tag arg ++# Infer tagged configuration to use if any are available and ++# if one wasn't chosen via the "--tag" command line option. ++# Only attempt this if the compiler in the base compile ++# command doesn't match the default compiler. ++# arg is usually of the form 'gcc ...' ++func_infer_tag () ++{ ++ $opt_debug ++ if test -n "$available_tags" && test -z "$tagname"; then ++ CC_quoted= ++ for arg in $CC; do ++ func_quote_for_eval "$arg" ++ CC_quoted="$CC_quoted $func_quote_for_eval_result" ++ done ++ CC_expanded=`func_echo_all $CC` ++ CC_quoted_expanded=`func_echo_all $CC_quoted` ++ case $@ in ++ # Blanks in the command may have been stripped by the calling shell, ++ # but not from the CC environment variable when configure was run. ++ " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \ ++ " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;; ++ # Blanks at the start of $base_compile will cause this to fail ++ # if we don't check for them as well. ++ *) ++ for z in $available_tags; do ++ if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then ++ # Evaluate the configuration. 
++ eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" ++ CC_quoted= ++ for arg in $CC; do ++ # Double-quote args containing other shell metacharacters. ++ func_quote_for_eval "$arg" ++ CC_quoted="$CC_quoted $func_quote_for_eval_result" ++ done ++ CC_expanded=`func_echo_all $CC` ++ CC_quoted_expanded=`func_echo_all $CC_quoted` ++ case "$@ " in ++ " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \ ++ " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ++ # The compiler in the base compile command matches ++ # the one in the tagged configuration. ++ # Assume this is the tagged configuration we want. ++ tagname=$z ++ break ++ ;; ++ esac ++ fi ++ done ++ # If $tagname still isn't set, then no tagged configuration ++ # was found and let the user know that the "--tag" command ++ # line option must be used. ++ if test -z "$tagname"; then ++ func_echo "unable to infer tagged configuration" ++ func_fatal_error "specify a tag with \`--tag'" ++# else ++# func_verbose "using $tagname tagged configuration" ++ fi ++ ;; ++ esac ++ fi ++} ++ ++ ++ ++# func_write_libtool_object output_name pic_name nonpic_name ++# Create a libtool object file (analogous to a ".la" file), ++# but don't create it if we're doing a dry run. ++func_write_libtool_object () ++{ ++ write_libobj=${1} ++ if test "$build_libtool_libs" = yes; then ++ write_lobj=\'${2}\' ++ else ++ write_lobj=none ++ fi ++ ++ if test "$build_old_libs" = yes; then ++ write_oldobj=\'${3}\' ++ else ++ write_oldobj=none ++ fi ++ ++ $opt_dry_run || { ++ cat >${write_libobj}T <?"'"'"' &()|`$[]' \ ++ && func_warning "libobj name \`$libobj' may not contain shell special characters." 
++ func_dirname_and_basename "$obj" "/" "" ++ objname="$func_basename_result" ++ xdir="$func_dirname_result" ++ lobj=${xdir}$objdir/$objname ++ ++ test -z "$base_compile" && \ ++ func_fatal_help "you must specify a compilation command" ++ ++ # Delete any leftover library objects. ++ if test "$build_old_libs" = yes; then ++ removelist="$obj $lobj $libobj ${libobj}T" ++ else ++ removelist="$lobj $libobj ${libobj}T" ++ fi ++ ++ # On Cygwin there's no "real" PIC flag so we must build both object types ++ case $host_os in ++ cygwin* | mingw* | pw32* | os2* | cegcc*) ++ pic_mode=default ++ ;; ++ esac ++ if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then ++ # non-PIC code in shared libraries is not supported ++ pic_mode=default ++ fi ++ ++ # Calculate the filename of the output object if compiler does ++ # not support -o with -c ++ if test "$compiler_c_o" = no; then ++ output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.${objext} ++ lockfile="$output_obj.lock" ++ else ++ output_obj= ++ need_locks=no ++ lockfile= ++ fi ++ ++ # Lock this critical section if it is needed ++ # We use this script file to make the link, it avoids creating a new file ++ if test "$need_locks" = yes; then ++ until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do ++ func_echo "Waiting for $lockfile to be removed" ++ sleep 2 ++ done ++ elif test "$need_locks" = warn; then ++ if test -f "$lockfile"; then ++ $ECHO "\ ++*** ERROR, $lockfile exists and contains: ++`cat $lockfile 2>/dev/null` ++ ++This indicates that another process is trying to use the same ++temporary object file, and libtool could not work around it because ++your compiler does not support \`-c' and \`-o' together. If you ++repeat this compilation, it may succeed, by chance, but you had better ++avoid parallel builds (make -j) in this platform, or get a better ++compiler." 
++ ++ $opt_dry_run || $RM $removelist ++ exit $EXIT_FAILURE ++ fi ++ removelist="$removelist $output_obj" ++ $ECHO "$srcfile" > "$lockfile" ++ fi ++ ++ $opt_dry_run || $RM $removelist ++ removelist="$removelist $lockfile" ++ trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15 ++ ++ if test -n "$fix_srcfile_path"; then ++ eval "srcfile=\"$fix_srcfile_path\"" ++ fi ++ func_quote_for_eval "$srcfile" ++ qsrcfile=$func_quote_for_eval_result ++ ++ # Only build a PIC object if we are building libtool libraries. ++ if test "$build_libtool_libs" = yes; then ++ # Without this assignment, base_compile gets emptied. ++ fbsd_hideous_sh_bug=$base_compile ++ ++ if test "$pic_mode" != no; then ++ command="$base_compile $qsrcfile $pic_flag" ++ else ++ # Don't build PIC code ++ command="$base_compile $qsrcfile" ++ fi ++ ++ func_mkdir_p "$xdir$objdir" ++ ++ if test -z "$output_obj"; then ++ # Place PIC objects in $objdir ++ command="$command -o $lobj" ++ fi ++ ++ func_show_eval_locale "$command" \ ++ 'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE' ++ ++ if test "$need_locks" = warn && ++ test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then ++ $ECHO "\ ++*** ERROR, $lockfile contains: ++`cat $lockfile 2>/dev/null` ++ ++but it should contain: ++$srcfile ++ ++This indicates that another process is trying to use the same ++temporary object file, and libtool could not work around it because ++your compiler does not support \`-c' and \`-o' together. If you ++repeat this compilation, it may succeed, by chance, but you had better ++avoid parallel builds (make -j) in this platform, or get a better ++compiler." 
++ ++ $opt_dry_run || $RM $removelist ++ exit $EXIT_FAILURE ++ fi ++ ++ # Just move the object if needed, then go on to compile the next one ++ if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then ++ func_show_eval '$MV "$output_obj" "$lobj"' \ ++ 'error=$?; $opt_dry_run || $RM $removelist; exit $error' ++ fi ++ ++ # Allow error messages only from the first compilation. ++ if test "$suppress_opt" = yes; then ++ suppress_output=' >/dev/null 2>&1' ++ fi ++ fi ++ ++ # Only build a position-dependent object if we build old libraries. ++ if test "$build_old_libs" = yes; then ++ if test "$pic_mode" != yes; then ++ # Don't build PIC code ++ command="$base_compile $qsrcfile$pie_flag" ++ else ++ command="$base_compile $qsrcfile $pic_flag" ++ fi ++ if test "$compiler_c_o" = yes; then ++ command="$command -o $obj" ++ fi ++ ++ # Suppress compiler output if we already did a PIC compilation. ++ command="$command$suppress_output" ++ func_show_eval_locale "$command" \ ++ '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' ++ ++ if test "$need_locks" = warn && ++ test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then ++ $ECHO "\ ++*** ERROR, $lockfile contains: ++`cat $lockfile 2>/dev/null` ++ ++but it should contain: ++$srcfile ++ ++This indicates that another process is trying to use the same ++temporary object file, and libtool could not work around it because ++your compiler does not support \`-c' and \`-o' together. If you ++repeat this compilation, it may succeed, by chance, but you had better ++avoid parallel builds (make -j) in this platform, or get a better ++compiler." 
++ ++ $opt_dry_run || $RM $removelist ++ exit $EXIT_FAILURE ++ fi ++ ++ # Just move the object if needed ++ if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then ++ func_show_eval '$MV "$output_obj" "$obj"' \ ++ 'error=$?; $opt_dry_run || $RM $removelist; exit $error' ++ fi ++ fi ++ ++ $opt_dry_run || { ++ func_write_libtool_object "$libobj" "$objdir/$objname" "$objname" ++ ++ # Unlock the critical section if it was locked ++ if test "$need_locks" != no; then ++ removelist=$lockfile ++ $RM "$lockfile" ++ fi ++ } ++ ++ exit $EXIT_SUCCESS ++} ++ ++$opt_help || { ++ test "$mode" = compile && func_mode_compile ${1+"$@"} ++} ++ ++func_mode_help () ++{ ++ # We need to display help for each of the modes. ++ case $mode in ++ "") ++ # Generic help is extracted from the usage comments ++ # at the start of this file. ++ func_help ++ ;; ++ ++ clean) ++ $ECHO \ ++"Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE... ++ ++Remove files from the build directory. ++ ++RM is the name of the program to use to delete files associated with each FILE ++(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed ++to RM. ++ ++If FILE is a libtool library, object or program, all the files associated ++with it are deleted. Otherwise, only FILE itself is deleted using RM." ++ ;; ++ ++ compile) ++ $ECHO \ ++"Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE ++ ++Compile a source file into a libtool library object. 
++ ++This mode accepts the following additional options: ++ ++ -o OUTPUT-FILE set the output file name to OUTPUT-FILE ++ -no-suppress do not suppress compiler output for multiple passes ++ -prefer-pic try to building PIC objects only ++ -prefer-non-pic try to building non-PIC objects only ++ -shared do not build a \`.o' file suitable for static linking ++ -static only build a \`.o' file suitable for static linking ++ -Wc,FLAG pass FLAG directly to the compiler ++ ++COMPILE-COMMAND is a command to be used in creating a \`standard' object file ++from the given SOURCEFILE. ++ ++The output file name is determined by removing the directory component from ++SOURCEFILE, then substituting the C source code suffix \`.c' with the ++library object suffix, \`.lo'." ++ ;; ++ ++ execute) ++ $ECHO \ ++"Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]... ++ ++Automatically set library path, then run a program. ++ ++This mode accepts the following additional options: ++ ++ -dlopen FILE add the directory containing FILE to the library path ++ ++This mode sets the library path environment variable according to \`-dlopen' ++flags. ++ ++If any of the ARGS are libtool executable wrappers, then they are translated ++into their corresponding uninstalled binary, and any of their required library ++directories are added to the library path. ++ ++Then, COMMAND is executed, with ARGS as arguments." ++ ;; ++ ++ finish) ++ $ECHO \ ++"Usage: $progname [OPTION]... --mode=finish [LIBDIR]... ++ ++Complete the installation of libtool libraries. ++ ++Each LIBDIR is a directory that contains libtool libraries. ++ ++The commands that this mode executes may require superuser privileges. Use ++the \`--dry-run' option if you just want to see what would be executed." ++ ;; ++ ++ install) ++ $ECHO \ ++"Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND... ++ ++Install executables or libraries. ++ ++INSTALL-COMMAND is the installation command. 
The first component should be ++either the \`install' or \`cp' program. ++ ++The following components of INSTALL-COMMAND are treated specially: ++ ++ -inst-prefix-dir PREFIX-DIR Use PREFIX-DIR as a staging area for installation ++ ++The rest of the components are interpreted as arguments to that command (only ++BSD-compatible install options are recognized)." ++ ;; ++ ++ link) ++ $ECHO \ ++"Usage: $progname [OPTION]... --mode=link LINK-COMMAND... ++ ++Link object files or libraries together to form another library, or to ++create an executable program. ++ ++LINK-COMMAND is a command using the C compiler that you would use to create ++a program from several object files. ++ ++The following components of LINK-COMMAND are treated specially: ++ ++ -all-static do not do any dynamic linking at all ++ -avoid-version do not add a version suffix if possible ++ -bindir BINDIR specify path to binaries directory (for systems where ++ libraries must be found in the PATH setting at runtime) ++ -dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime ++ -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols ++ -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3) ++ -export-symbols SYMFILE ++ try to export only the symbols listed in SYMFILE ++ -export-symbols-regex REGEX ++ try to export only the symbols matching REGEX ++ -LLIBDIR search LIBDIR for required installed libraries ++ -lNAME OUTPUT-FILE requires the installed library libNAME ++ -module build a library that can dlopened ++ -no-fast-install disable the fast-install mode ++ -no-install link a not-installable executable ++ -no-undefined declare that a library does not refer to external symbols ++ -o OUTPUT-FILE create OUTPUT-FILE from the specified objects ++ -objectlist FILE Use a list of object files found in FILE to specify objects ++ -precious-files-regex REGEX ++ don't remove output files matching REGEX ++ -release RELEASE specify package release information ++ 
-rpath LIBDIR the created library will eventually be installed in LIBDIR ++ -R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries ++ -shared only do dynamic linking of libtool libraries ++ -shrext SUFFIX override the standard shared library file extension ++ -static do not do any dynamic linking of uninstalled libtool libraries ++ -static-libtool-libs ++ do not do any dynamic linking of libtool libraries ++ -version-info CURRENT[:REVISION[:AGE]] ++ specify library version info [each variable defaults to 0] ++ -weak LIBNAME declare that the target provides the LIBNAME interface ++ -Wc,FLAG ++ -Xcompiler FLAG pass linker-specific FLAG directly to the compiler ++ -Wl,FLAG ++ -Xlinker FLAG pass linker-specific FLAG directly to the linker ++ -XCClinker FLAG pass link-specific FLAG to the compiler driver (CC) ++ ++All other options (arguments beginning with \`-') are ignored. ++ ++Every other argument is treated as a filename. Files ending in \`.la' are ++treated as uninstalled libtool libraries, other files are standard or library ++object files. ++ ++If the OUTPUT-FILE ends in \`.la', then a libtool library is created, ++only library objects (\`.lo' files) may be specified, and \`-rpath' is ++required, except when creating a convenience library. ++ ++If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created ++using \`ar' and \`ranlib', or on Windows using \`lib'. ++ ++If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file ++is created, otherwise an executable program is created." ++ ;; ++ ++ uninstall) ++ $ECHO \ ++"Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE... ++ ++Remove libraries from an installation directory. ++ ++RM is the name of the program to use to delete files associated with each FILE ++(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed ++to RM. ++ ++If FILE is a libtool library, all the files associated with it are deleted. 
++Otherwise, only FILE itself is deleted using RM." ++ ;; ++ ++ *) ++ func_fatal_help "invalid operation mode \`$mode'" ++ ;; ++ esac ++ ++ echo ++ $ECHO "Try \`$progname --help' for more information about other modes." ++} ++ ++# Now that we've collected a possible --mode arg, show help if necessary ++if $opt_help; then ++ if test "$opt_help" = :; then ++ func_mode_help ++ else ++ { ++ func_help noexit ++ for mode in compile link execute install finish uninstall clean; do ++ func_mode_help ++ done ++ } | sed -n '1p; 2,$s/^Usage:/ or: /p' ++ { ++ func_help noexit ++ for mode in compile link execute install finish uninstall clean; do ++ echo ++ func_mode_help ++ done ++ } | ++ sed '1d ++ /^When reporting/,/^Report/{ ++ H ++ d ++ } ++ $x ++ /information about other modes/d ++ /more detailed .*MODE/d ++ s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/' ++ fi ++ exit $? ++fi ++ ++ ++# func_mode_execute arg... ++func_mode_execute () ++{ ++ $opt_debug ++ # The first argument is the command name. ++ cmd="$nonopt" ++ test -z "$cmd" && \ ++ func_fatal_help "you must specify a COMMAND" ++ ++ # Handle -dlopen flags immediately. ++ for file in $execute_dlfiles; do ++ test -f "$file" \ ++ || func_fatal_help "\`$file' is not a file" ++ ++ dir= ++ case $file in ++ *.la) ++ # Check to see that this really is a libtool archive. ++ func_lalib_unsafe_p "$file" \ ++ || func_fatal_help "\`$lib' is not a valid libtool archive" ++ ++ # Read the libtool library. ++ dlname= ++ library_names= ++ func_source "$file" ++ ++ # Skip this library if it cannot be dlopened. ++ if test -z "$dlname"; then ++ # Warn if it was a shared library. ++ test -n "$library_names" && \ ++ func_warning "\`$file' was not linked with \`-export-dynamic'" ++ continue ++ fi ++ ++ func_dirname "$file" "" "." ++ dir="$func_dirname_result" ++ ++ if test -f "$dir/$objdir/$dlname"; then ++ dir="$dir/$objdir" ++ else ++ if test ! 
-f "$dir/$dlname"; then ++ func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" ++ fi ++ fi ++ ;; ++ ++ *.lo) ++ # Just add the directory containing the .lo file. ++ func_dirname "$file" "" "." ++ dir="$func_dirname_result" ++ ;; ++ ++ *) ++ func_warning "\`-dlopen' is ignored for non-libtool libraries and objects" ++ continue ++ ;; ++ esac ++ ++ # Get the absolute pathname. ++ absdir=`cd "$dir" && pwd` ++ test -n "$absdir" && dir="$absdir" ++ ++ # Now add the directory to shlibpath_var. ++ if eval test -z \"\$$shlibpath_var\"; then ++ eval $shlibpath_var=\$dir ++ else ++ eval $shlibpath_var=\$dir:\$$shlibpath_var ++ fi ++ done ++ ++ # This variable tells wrapper scripts just to set shlibpath_var ++ # rather than running their programs. ++ libtool_execute_magic="$magic" ++ ++ # Check if any of the arguments is a wrapper script. ++ args= ++ for file ++ do ++ case $file in ++ -* | *.la | *.lo ) ;; ++ *) ++ # Do a test to see if this is really a libtool program. ++ if func_ltwrapper_script_p "$file"; then ++ func_source "$file" ++ # Transform arg to wrapped name. ++ file="$progdir/$program" ++ elif func_ltwrapper_executable_p "$file"; then ++ func_ltwrapper_scriptname "$file" ++ func_source "$func_ltwrapper_scriptname_result" ++ # Transform arg to wrapped name. ++ file="$progdir/$program" ++ fi ++ ;; ++ esac ++ # Quote arguments (to preserve shell metacharacters). ++ func_quote_for_eval "$file" ++ args="$args $func_quote_for_eval_result" ++ done ++ ++ if test "X$opt_dry_run" = Xfalse; then ++ if test -n "$shlibpath_var"; then ++ # Export the shlibpath_var. ++ eval "export $shlibpath_var" ++ fi ++ ++ # Restore saved environment variables ++ for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES ++ do ++ eval "if test \"\${save_$lt_var+set}\" = set; then ++ $lt_var=\$save_$lt_var; export $lt_var ++ else ++ $lt_unset $lt_var ++ fi" ++ done ++ ++ # Now prepare to actually exec the command. 
++ exec_cmd="\$cmd$args" ++ else ++ # Display what would be done. ++ if test -n "$shlibpath_var"; then ++ eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\"" ++ echo "export $shlibpath_var" ++ fi ++ $ECHO "$cmd$args" ++ exit $EXIT_SUCCESS ++ fi ++} ++ ++test "$mode" = execute && func_mode_execute ${1+"$@"} ++ ++ ++# func_mode_finish arg... ++func_mode_finish () ++{ ++ $opt_debug ++ libdirs="$nonopt" ++ admincmds= ++ ++ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then ++ for dir ++ do ++ libdirs="$libdirs $dir" ++ done ++ ++ for libdir in $libdirs; do ++ if test -n "$finish_cmds"; then ++ # Do each command in the finish commands. ++ func_execute_cmds "$finish_cmds" 'admincmds="$admincmds ++'"$cmd"'"' ++ fi ++ if test -n "$finish_eval"; then ++ # Do the single finish_eval. ++ eval cmds=\"$finish_eval\" ++ $opt_dry_run || eval "$cmds" || admincmds="$admincmds ++ $cmds" ++ fi ++ done ++ fi ++ ++ # Exit here if they wanted silent mode. ++ $opt_silent && exit $EXIT_SUCCESS ++ ++ echo "----------------------------------------------------------------------" ++ echo "Libraries have been installed in:" ++ for libdir in $libdirs; do ++ $ECHO " $libdir" ++ done ++ echo ++ echo "If you ever happen to want to link against installed libraries" ++ echo "in a given directory, LIBDIR, you must either use libtool, and" ++ echo "specify the full pathname of the library, or use the \`-LLIBDIR'" ++ echo "flag during linking and do at least one of the following:" ++ if test -n "$shlibpath_var"; then ++ echo " - add LIBDIR to the \`$shlibpath_var' environment variable" ++ echo " during execution" ++ fi ++ if test -n "$runpath_var"; then ++ echo " - add LIBDIR to the \`$runpath_var' environment variable" ++ echo " during linking" ++ fi ++ if test -n "$hardcode_libdir_flag_spec"; then ++ libdir=LIBDIR ++ eval "flag=\"$hardcode_libdir_flag_spec\"" ++ ++ $ECHO " - use the \`$flag' linker flag" ++ fi ++ if test -n "$admincmds"; then ++ $ECHO " - have your system administrator 
run these commands:$admincmds" ++ fi ++ if test -f /etc/ld.so.conf; then ++ echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" ++ fi ++ echo ++ ++ echo "See any operating system documentation about shared libraries for" ++ case $host in ++ solaris2.[6789]|solaris2.1[0-9]) ++ echo "more information, such as the ld(1), crle(1) and ld.so(8) manual" ++ echo "pages." ++ ;; ++ *) ++ echo "more information, such as the ld(1) and ld.so(8) manual pages." ++ ;; ++ esac ++ echo "----------------------------------------------------------------------" ++ exit $EXIT_SUCCESS ++} ++ ++test "$mode" = finish && func_mode_finish ${1+"$@"} ++ ++ ++# func_mode_install arg... ++func_mode_install () ++{ ++ $opt_debug ++ # There may be an optional sh(1) argument at the beginning of ++ # install_prog (especially on Windows NT). ++ if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh || ++ # Allow the use of GNU shtool's install command. ++ case $nonopt in *shtool*) :;; *) false;; esac; then ++ # Aesthetically quote it. ++ func_quote_for_eval "$nonopt" ++ install_prog="$func_quote_for_eval_result " ++ arg=$1 ++ shift ++ else ++ install_prog= ++ arg=$nonopt ++ fi ++ ++ # The real first argument should be the name of the installation program. ++ # Aesthetically quote it. ++ func_quote_for_eval "$arg" ++ install_prog="$install_prog$func_quote_for_eval_result" ++ install_shared_prog=$install_prog ++ case " $install_prog " in ++ *[\\\ /]cp\ *) install_cp=: ;; ++ *) install_cp=false ;; ++ esac ++ ++ # We need to accept at least all the BSD install flags. 
++ dest= ++ files= ++ opts= ++ prev= ++ install_type= ++ isdir=no ++ stripme= ++ no_mode=: ++ for arg ++ do ++ arg2= ++ if test -n "$dest"; then ++ files="$files $dest" ++ dest=$arg ++ continue ++ fi ++ ++ case $arg in ++ -d) isdir=yes ;; ++ -f) ++ if $install_cp; then :; else ++ prev=$arg ++ fi ++ ;; ++ -g | -m | -o) ++ prev=$arg ++ ;; ++ -s) ++ stripme=" -s" ++ continue ++ ;; ++ -*) ++ ;; ++ *) ++ # If the previous option needed an argument, then skip it. ++ if test -n "$prev"; then ++ if test "x$prev" = x-m && test -n "$install_override_mode"; then ++ arg2=$install_override_mode ++ no_mode=false ++ fi ++ prev= ++ else ++ dest=$arg ++ continue ++ fi ++ ;; ++ esac ++ ++ # Aesthetically quote the argument. ++ func_quote_for_eval "$arg" ++ install_prog="$install_prog $func_quote_for_eval_result" ++ if test -n "$arg2"; then ++ func_quote_for_eval "$arg2" ++ fi ++ install_shared_prog="$install_shared_prog $func_quote_for_eval_result" ++ done ++ ++ test -z "$install_prog" && \ ++ func_fatal_help "you must specify an install program" ++ ++ test -n "$prev" && \ ++ func_fatal_help "the \`$prev' option requires an argument" ++ ++ if test -n "$install_override_mode" && $no_mode; then ++ if $install_cp; then :; else ++ func_quote_for_eval "$install_override_mode" ++ install_shared_prog="$install_shared_prog -m $func_quote_for_eval_result" ++ fi ++ fi ++ ++ if test -z "$files"; then ++ if test -z "$dest"; then ++ func_fatal_help "no file or destination specified" ++ else ++ func_fatal_help "you must specify a destination" ++ fi ++ fi ++ ++ # Strip any trailing slash from the destination. ++ func_stripname '' '/' "$dest" ++ dest=$func_stripname_result ++ ++ # Check to see that the destination is a directory. ++ test -d "$dest" && isdir=yes ++ if test "$isdir" = yes; then ++ destdir="$dest" ++ destname= ++ else ++ func_dirname_and_basename "$dest" "" "." 
++ destdir="$func_dirname_result" ++ destname="$func_basename_result" ++ ++ # Not a directory, so check to see that there is only one file specified. ++ set dummy $files; shift ++ test "$#" -gt 1 && \ ++ func_fatal_help "\`$dest' is not a directory" ++ fi ++ case $destdir in ++ [\\/]* | [A-Za-z]:[\\/]*) ;; ++ *) ++ for file in $files; do ++ case $file in ++ *.lo) ;; ++ *) ++ func_fatal_help "\`$destdir' must be an absolute directory name" ++ ;; ++ esac ++ done ++ ;; ++ esac ++ ++ # This variable tells wrapper scripts just to set variables rather ++ # than running their programs. ++ libtool_install_magic="$magic" ++ ++ staticlibs= ++ future_libdirs= ++ current_libdirs= ++ for file in $files; do ++ ++ # Do each installation. ++ case $file in ++ *.$libext) ++ # Do the static libraries later. ++ staticlibs="$staticlibs $file" ++ ;; ++ ++ *.la) ++ # Check to see that this really is a libtool archive. ++ func_lalib_unsafe_p "$file" \ ++ || func_fatal_help "\`$file' is not a valid libtool archive" ++ ++ library_names= ++ old_library= ++ relink_command= ++ func_source "$file" ++ ++ # Add the libdir to current_libdirs if it is the destination. ++ if test "X$destdir" = "X$libdir"; then ++ case "$current_libdirs " in ++ *" $libdir "*) ;; ++ *) current_libdirs="$current_libdirs $libdir" ;; ++ esac ++ else ++ # Note the libdir as a future libdir. ++ case "$future_libdirs " in ++ *" $libdir "*) ;; ++ *) future_libdirs="$future_libdirs $libdir" ;; ++ esac ++ fi ++ ++ func_dirname "$file" "/" "" ++ dir="$func_dirname_result" ++ dir="$dir$objdir" ++ ++ if test -n "$relink_command"; then ++ # Determine the prefix the user has applied to our future dir. ++ inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"` ++ ++ # Don't allow the user to place us outside of our expected ++ # location b/c this prevents finding dependent libraries that ++ # are installed to the same prefix. 
++ # At present, this check doesn't affect windows .dll's that ++ # are installed into $libdir/../bin (currently, that works fine) ++ # but it's something to keep an eye on. ++ test "$inst_prefix_dir" = "$destdir" && \ ++ func_fatal_error "error: cannot install \`$file' to a directory not ending in $libdir" ++ ++ if test -n "$inst_prefix_dir"; then ++ # Stick the inst_prefix_dir data into the link command. ++ relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"` ++ else ++ relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"` ++ fi ++ ++ func_warning "relinking \`$file'" ++ func_show_eval "$relink_command" \ ++ 'func_fatal_error "error: relink \`$file'\'' with the above command before installing it"' ++ fi ++ ++ # See the names of the shared library. ++ set dummy $library_names; shift ++ if test -n "$1"; then ++ realname="$1" ++ shift ++ ++ srcname="$realname" ++ test -n "$relink_command" && srcname="$realname"T ++ ++ # Install the shared library and build the symlinks. ++ func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \ ++ 'exit $?' ++ tstripme="$stripme" ++ case $host_os in ++ cygwin* | mingw* | pw32* | cegcc*) ++ case $realname in ++ *.dll.a) ++ tstripme="" ++ ;; ++ esac ++ ;; ++ esac ++ if test -n "$tstripme" && test -n "$striplib"; then ++ func_show_eval "$striplib $destdir/$realname" 'exit $?' ++ fi ++ ++ if test "$#" -gt 0; then ++ # Delete the old symlinks, and create new ones. ++ # Try `ln -sf' first, because the `ln' binary might depend on ++ # the symlink we replace! Solaris /bin/ln does not understand -f, ++ # so we also need to try rm && ln -s. ++ for linkname ++ do ++ test "$linkname" != "$realname" \ ++ && func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })" ++ done ++ fi ++ ++ # Do each command in the postinstall commands. 
++ lib="$destdir/$realname" ++ func_execute_cmds "$postinstall_cmds" 'exit $?' ++ fi ++ ++ # Install the pseudo-library for information purposes. ++ func_basename "$file" ++ name="$func_basename_result" ++ instname="$dir/$name"i ++ func_show_eval "$install_prog $instname $destdir/$name" 'exit $?' ++ ++ # Maybe install the static library, too. ++ test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library" ++ ;; ++ ++ *.lo) ++ # Install (i.e. copy) a libtool object. ++ ++ # Figure out destination file name, if it wasn't already specified. ++ if test -n "$destname"; then ++ destfile="$destdir/$destname" ++ else ++ func_basename "$file" ++ destfile="$func_basename_result" ++ destfile="$destdir/$destfile" ++ fi ++ ++ # Deduce the name of the destination old-style object file. ++ case $destfile in ++ *.lo) ++ func_lo2o "$destfile" ++ staticdest=$func_lo2o_result ++ ;; ++ *.$objext) ++ staticdest="$destfile" ++ destfile= ++ ;; ++ *) ++ func_fatal_help "cannot copy a libtool object to \`$destfile'" ++ ;; ++ esac ++ ++ # Install the libtool object if requested. ++ test -n "$destfile" && \ ++ func_show_eval "$install_prog $file $destfile" 'exit $?' ++ ++ # Install the old object if enabled. ++ if test "$build_old_libs" = yes; then ++ # Deduce the name of the old-style object file. ++ func_lo2o "$file" ++ staticobj=$func_lo2o_result ++ func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?' ++ fi ++ exit $EXIT_SUCCESS ++ ;; ++ ++ *) ++ # Figure out destination file name, if it wasn't already specified. ++ if test -n "$destname"; then ++ destfile="$destdir/$destname" ++ else ++ func_basename "$file" ++ destfile="$func_basename_result" ++ destfile="$destdir/$destfile" ++ fi ++ ++ # If the file is missing, and there is a .exe on the end, strip it ++ # because it is most likely a libtool script we actually want to ++ # install ++ stripped_ext="" ++ case $file in ++ *.exe) ++ if test ! 
-f "$file"; then ++ func_stripname '' '.exe' "$file" ++ file=$func_stripname_result ++ stripped_ext=".exe" ++ fi ++ ;; ++ esac ++ ++ # Do a test to see if this is really a libtool program. ++ case $host in ++ *cygwin* | *mingw*) ++ if func_ltwrapper_executable_p "$file"; then ++ func_ltwrapper_scriptname "$file" ++ wrapper=$func_ltwrapper_scriptname_result ++ else ++ func_stripname '' '.exe' "$file" ++ wrapper=$func_stripname_result ++ fi ++ ;; ++ *) ++ wrapper=$file ++ ;; ++ esac ++ if func_ltwrapper_script_p "$wrapper"; then ++ notinst_deplibs= ++ relink_command= ++ ++ func_source "$wrapper" ++ ++ # Check the variables that should have been set. ++ test -z "$generated_by_libtool_version" && \ ++ func_fatal_error "invalid libtool wrapper script \`$wrapper'" ++ ++ finalize=yes ++ for lib in $notinst_deplibs; do ++ # Check to see that each library is installed. ++ libdir= ++ if test -f "$lib"; then ++ func_source "$lib" ++ fi ++ libfile="$libdir/"`$ECHO "$lib" | $SED 's%^.*/%%g'` ### testsuite: skip nested quoting test ++ if test -n "$libdir" && test ! -f "$libfile"; then ++ func_warning "\`$lib' has not been installed in \`$libdir'" ++ finalize=no ++ fi ++ done ++ ++ relink_command= ++ func_source "$wrapper" ++ ++ outputname= ++ if test "$fast_install" = no && test -n "$relink_command"; then ++ $opt_dry_run || { ++ if test "$finalize" = yes; then ++ tmpdir=`func_mktempdir` ++ func_basename "$file$stripped_ext" ++ file="$func_basename_result" ++ outputname="$tmpdir/$file" ++ # Replace the output file specification. 
++ relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'` ++ ++ $opt_silent || { ++ func_quote_for_expand "$relink_command" ++ eval "func_echo $func_quote_for_expand_result" ++ } ++ if eval "$relink_command"; then : ++ else ++ func_error "error: relink \`$file' with the above command before installing it" ++ $opt_dry_run || ${RM}r "$tmpdir" ++ continue ++ fi ++ file="$outputname" ++ else ++ func_warning "cannot relink \`$file'" ++ fi ++ } ++ else ++ # Install the binary that we compiled earlier. ++ file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"` ++ fi ++ fi ++ ++ # remove .exe since cygwin /usr/bin/install will append another ++ # one anyway ++ case $install_prog,$host in ++ */usr/bin/install*,*cygwin*) ++ case $file:$destfile in ++ *.exe:*.exe) ++ # this is ok ++ ;; ++ *.exe:*) ++ destfile=$destfile.exe ++ ;; ++ *:*.exe) ++ func_stripname '' '.exe' "$destfile" ++ destfile=$func_stripname_result ++ ;; ++ esac ++ ;; ++ esac ++ func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?' ++ $opt_dry_run || if test -n "$outputname"; then ++ ${RM}r "$tmpdir" ++ fi ++ ;; ++ esac ++ done ++ ++ for file in $staticlibs; do ++ func_basename "$file" ++ name="$func_basename_result" ++ ++ # Set up the ranlib parameters. ++ oldlib="$destdir/$name" ++ ++ func_show_eval "$install_prog \$file \$oldlib" 'exit $?' ++ ++ if test -n "$stripme" && test -n "$old_striplib"; then ++ func_show_eval "$old_striplib $oldlib" 'exit $?' ++ fi ++ ++ # Do each command in the postinstall commands. ++ func_execute_cmds "$old_postinstall_cmds" 'exit $?' ++ done ++ ++ test -n "$future_libdirs" && \ ++ func_warning "remember to run \`$progname --finish$future_libdirs'" ++ ++ if test -n "$current_libdirs" && $opt_finish; then ++ # Maybe just do a dry run. 
++ $opt_dry_run && current_libdirs=" -n$current_libdirs" ++ exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs' ++ else ++ exit $EXIT_SUCCESS ++ fi ++} ++ ++test "$mode" = install && func_mode_install ${1+"$@"} ++ ++ ++# func_generate_dlsyms outputname originator pic_p ++# Extract symbols from dlprefiles and create ${outputname}S.o with ++# a dlpreopen symbol table. ++func_generate_dlsyms () ++{ ++ $opt_debug ++ my_outputname="$1" ++ my_originator="$2" ++ my_pic_p="${3-no}" ++ my_prefix=`$ECHO "$my_originator" | sed 's%[^a-zA-Z0-9]%_%g'` ++ my_dlsyms= ++ ++ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then ++ if test -n "$NM" && test -n "$global_symbol_pipe"; then ++ my_dlsyms="${my_outputname}S.c" ++ else ++ func_error "not configured to extract global symbols from dlpreopened files" ++ fi ++ fi ++ ++ if test -n "$my_dlsyms"; then ++ case $my_dlsyms in ++ "") ;; ++ *.c) ++ # Discover the nlist of each of the dlfiles. ++ nlist="$output_objdir/${my_outputname}.nm" ++ ++ func_show_eval "$RM $nlist ${nlist}S ${nlist}T" ++ ++ # Parse the name list into a source file. ++ func_verbose "creating $output_objdir/$my_dlsyms" ++ ++ $opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\ ++/* $my_dlsyms - symbol resolution table for \`$my_outputname' dlsym emulation. */ ++/* Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION */ ++ ++#ifdef __cplusplus ++extern \"C\" { ++#endif ++ ++#if defined(__GNUC__) && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4)) ++#pragma GCC diagnostic ignored \"-Wstrict-prototypes\" ++#endif ++ ++/* External symbol declarations for the compiler. */\ ++" ++ ++ if test "$dlself" = yes; then ++ func_verbose "generating symbol list for \`$output'" ++ ++ $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist" ++ ++ # Add our own program objects to the symbol list. 
++ progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP` ++ for progfile in $progfiles; do ++ func_verbose "extracting global C symbols from \`$progfile'" ++ $opt_dry_run || eval "$NM $progfile | $global_symbol_pipe >> '$nlist'" ++ done ++ ++ if test -n "$exclude_expsyms"; then ++ $opt_dry_run || { ++ $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T ++ $MV "$nlist"T "$nlist" ++ } ++ fi ++ ++ if test -n "$export_symbols_regex"; then ++ $opt_dry_run || { ++ $EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T ++ $MV "$nlist"T "$nlist" ++ } ++ fi ++ ++ # Prepare the list of exported symbols ++ if test -z "$export_symbols"; then ++ export_symbols="$output_objdir/$outputname.exp" ++ $opt_dry_run || { ++ $RM $export_symbols ++ ${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' < "$nlist" > "$export_symbols" ++ case $host in ++ *cygwin* | *mingw* | *cegcc* ) ++ echo EXPORTS > "$output_objdir/$outputname.def" ++ cat "$export_symbols" >> "$output_objdir/$outputname.def" ++ ;; ++ esac ++ } ++ else ++ $opt_dry_run || { ++ ${SED} -e 's/\([].[*^$]\)/\\\1/g' -e 's/^/ /' -e 's/$/$/' < "$export_symbols" > "$output_objdir/$outputname.exp" ++ $GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T ++ $MV "$nlist"T "$nlist" ++ case $host in ++ *cygwin* | *mingw* | *cegcc* ) ++ echo EXPORTS > "$output_objdir/$outputname.def" ++ cat "$nlist" >> "$output_objdir/$outputname.def" ++ ;; ++ esac ++ } ++ fi ++ fi ++ ++ for dlprefile in $dlprefiles; do ++ func_verbose "extracting global C symbols from \`$dlprefile'" ++ func_basename "$dlprefile" ++ name="$func_basename_result" ++ $opt_dry_run || { ++ $ECHO ": $name " >> "$nlist" ++ eval "$NM $dlprefile 2>/dev/null | $global_symbol_pipe >> '$nlist'" ++ } ++ done ++ ++ $opt_dry_run || { ++ # Make sure we have at least an empty file. 
++ test -f "$nlist" || : > "$nlist" ++ ++ if test -n "$exclude_expsyms"; then ++ $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T ++ $MV "$nlist"T "$nlist" ++ fi ++ ++ # Try sorting and uniquifying the output. ++ if $GREP -v "^: " < "$nlist" | ++ if sort -k 3 /dev/null 2>&1; then ++ sort -k 3 ++ else ++ sort +2 ++ fi | ++ uniq > "$nlist"S; then ++ : ++ else ++ $GREP -v "^: " < "$nlist" > "$nlist"S ++ fi ++ ++ if test -f "$nlist"S; then ++ eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"' ++ else ++ echo '/* NONE */' >> "$output_objdir/$my_dlsyms" ++ fi ++ ++ echo >> "$output_objdir/$my_dlsyms" "\ ++ ++/* The mapping between symbol names and symbols. */ ++typedef struct { ++ const char *name; ++ void *address; ++} lt_dlsymlist; ++" ++ case $host in ++ *cygwin* | *mingw* | *cegcc* ) ++ echo >> "$output_objdir/$my_dlsyms" "\ ++/* DATA imports from DLLs on WIN32 con't be const, because ++ runtime relocations are performed -- see ld's documentation ++ on pseudo-relocs. 
*/" ++ lt_dlsym_const= ;; ++ *osf5*) ++ echo >> "$output_objdir/$my_dlsyms" "\ ++/* This system does not cope well with relocations in const data */" ++ lt_dlsym_const= ;; ++ *) ++ lt_dlsym_const=const ;; ++ esac ++ ++ echo >> "$output_objdir/$my_dlsyms" "\ ++extern $lt_dlsym_const lt_dlsymlist ++lt_${my_prefix}_LTX_preloaded_symbols[]; ++$lt_dlsym_const lt_dlsymlist ++lt_${my_prefix}_LTX_preloaded_symbols[] = ++{\ ++ { \"$my_originator\", (void *) 0 }," ++ ++ case $need_lib_prefix in ++ no) ++ eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms" ++ ;; ++ *) ++ eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms" ++ ;; ++ esac ++ echo >> "$output_objdir/$my_dlsyms" "\ ++ {0, (void *) 0} ++}; ++ ++/* This works around a problem in FreeBSD linker */ ++#ifdef FREEBSD_WORKAROUND ++static const void *lt_preloaded_setup() { ++ return lt_${my_prefix}_LTX_preloaded_symbols; ++} ++#endif ++ ++#ifdef __cplusplus ++} ++#endif\ ++" ++ } # !$opt_dry_run ++ ++ pic_flag_for_symtable= ++ case "$compile_command " in ++ *" -static "*) ;; ++ *) ++ case $host in ++ # compiling the symbol table file with pic_flag works around ++ # a FreeBSD bug that causes programs to crash when -lm is ++ # linked before any other PIC object. But we must not use ++ # pic_flag when linking with -static. The problem exists in ++ # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1. ++ *-*-freebsd2*|*-*-freebsd3.0*|*-*-freebsdelf3.0*) ++ pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;; ++ *-*-hpux*) ++ pic_flag_for_symtable=" $pic_flag" ;; ++ *) ++ if test "X$my_pic_p" != Xno; then ++ pic_flag_for_symtable=" $pic_flag" ++ fi ++ ;; ++ esac ++ ;; ++ esac ++ symtab_cflags= ++ for arg in $LTCFLAGS; do ++ case $arg in ++ -pie | -fpie | -fPIE) ;; ++ *) symtab_cflags="$symtab_cflags $arg" ;; ++ esac ++ done ++ ++ # Now compile the dynamic symbol file. 
++ func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?' ++ ++ # Clean up the generated files. ++ func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T"' ++ ++ # Transform the symbol file into the correct name. ++ symfileobj="$output_objdir/${my_outputname}S.$objext" ++ case $host in ++ *cygwin* | *mingw* | *cegcc* ) ++ if test -f "$output_objdir/$my_outputname.def"; then ++ compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"` ++ finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"` ++ else ++ compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"` ++ finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"` ++ fi ++ ;; ++ *) ++ compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"` ++ finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"` ++ ;; ++ esac ++ ;; ++ *) ++ func_fatal_error "unknown suffix for \`$my_dlsyms'" ++ ;; ++ esac ++ else ++ # We keep going just in case the user didn't refer to ++ # lt_preloaded_symbols. The linker will fail if global_symbol_pipe ++ # really was required. ++ ++ # Nullify the symbol file. ++ compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"` ++ finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"` ++ fi ++} ++ ++# func_win32_libid arg ++# return the library type of file 'arg' ++# ++# Need a lot of goo to handle *both* DLLs and import libs ++# Has to be a shell function in order to 'eat' the argument ++# that is supplied when $file_magic_command is called. ++# Despite the name, also deal with 64 bit binaries. 
++func_win32_libid () ++{ ++ $opt_debug ++ win32_libid_type="unknown" ++ win32_fileres=`file -L $1 2>/dev/null` ++ case $win32_fileres in ++ *ar\ archive\ import\ library*) # definitely import ++ win32_libid_type="x86 archive import" ++ ;; ++ *ar\ archive*) # could be an import, or static ++ if $OBJDUMP -f "$1" | $SED -e '10q' 2>/dev/null | ++ $EGREP 'file format (pe-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then ++ win32_nmres=`$NM -f posix -A "$1" | ++ $SED -n -e ' ++ 1,100{ ++ / I /{ ++ s,.*,import, ++ p ++ q ++ } ++ }'` ++ case $win32_nmres in ++ import*) win32_libid_type="x86 archive import";; ++ *) win32_libid_type="x86 archive static";; ++ esac ++ fi ++ ;; ++ *DLL*) ++ win32_libid_type="x86 DLL" ++ ;; ++ *executable*) # but shell scripts are "executable" too... ++ case $win32_fileres in ++ *MS\ Windows\ PE\ Intel*) ++ win32_libid_type="x86 DLL" ++ ;; ++ esac ++ ;; ++ esac ++ $ECHO "$win32_libid_type" ++} ++ ++ ++ ++# func_extract_an_archive dir oldlib ++func_extract_an_archive () ++{ ++ $opt_debug ++ f_ex_an_ar_dir="$1"; shift ++ f_ex_an_ar_oldlib="$1" ++ if test "$lock_old_archive_extraction" = yes; then ++ lockfile=$f_ex_an_ar_oldlib.lock ++ until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do ++ func_echo "Waiting for $lockfile to be removed" ++ sleep 2 ++ done ++ fi ++ func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \ ++ 'stat=$?; rm -f "$lockfile"; exit $stat' ++ if test "$lock_old_archive_extraction" = yes; then ++ $opt_dry_run || rm -f "$lockfile" ++ fi ++ if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then ++ : ++ else ++ func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib" ++ fi ++} ++ ++ ++# func_extract_archives gentop oldlib ... 
++func_extract_archives () ++{ ++ $opt_debug ++ my_gentop="$1"; shift ++ my_oldlibs=${1+"$@"} ++ my_oldobjs="" ++ my_xlib="" ++ my_xabs="" ++ my_xdir="" ++ ++ for my_xlib in $my_oldlibs; do ++ # Extract the objects. ++ case $my_xlib in ++ [\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;; ++ *) my_xabs=`pwd`"/$my_xlib" ;; ++ esac ++ func_basename "$my_xlib" ++ my_xlib="$func_basename_result" ++ my_xlib_u=$my_xlib ++ while :; do ++ case " $extracted_archives " in ++ *" $my_xlib_u "*) ++ func_arith $extracted_serial + 1 ++ extracted_serial=$func_arith_result ++ my_xlib_u=lt$extracted_serial-$my_xlib ;; ++ *) break ;; ++ esac ++ done ++ extracted_archives="$extracted_archives $my_xlib_u" ++ my_xdir="$my_gentop/$my_xlib_u" ++ ++ func_mkdir_p "$my_xdir" ++ ++ case $host in ++ *-darwin*) ++ func_verbose "Extracting $my_xabs" ++ # Do not bother doing anything if just a dry run ++ $opt_dry_run || { ++ darwin_orig_dir=`pwd` ++ cd $my_xdir || exit $? ++ darwin_archive=$my_xabs ++ darwin_curdir=`pwd` ++ darwin_base_archive=`basename "$darwin_archive"` ++ darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true` ++ if test -n "$darwin_arches"; then ++ darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'` ++ darwin_arch= ++ func_verbose "$darwin_base_archive has multiple architectures $darwin_arches" ++ for darwin_arch in $darwin_arches ; do ++ func_mkdir_p "unfat-$$/${darwin_base_archive}-${darwin_arch}" ++ $LIPO -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}" ++ cd "unfat-$$/${darwin_base_archive}-${darwin_arch}" ++ func_extract_an_archive "`pwd`" "${darwin_base_archive}" ++ cd "$darwin_curdir" ++ $RM "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" ++ done # $darwin_arches ++ ## Okay now we've a bunch of thin objects, gotta fatten them up :) ++ darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e 
"$basename" | sort -u` ++ darwin_file= ++ darwin_files= ++ for darwin_file in $darwin_filelist; do ++ darwin_files=`find unfat-$$ -name $darwin_file -print | $NL2SP` ++ $LIPO -create -output "$darwin_file" $darwin_files ++ done # $darwin_filelist ++ $RM -rf unfat-$$ ++ cd "$darwin_orig_dir" ++ else ++ cd $darwin_orig_dir ++ func_extract_an_archive "$my_xdir" "$my_xabs" ++ fi # $darwin_arches ++ } # !$opt_dry_run ++ ;; ++ *) ++ func_extract_an_archive "$my_xdir" "$my_xabs" ++ ;; ++ esac ++ my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP` ++ done ++ ++ func_extract_archives_result="$my_oldobjs" ++} ++ ++ ++# func_emit_wrapper [arg=no] ++# ++# Emit a libtool wrapper script on stdout. ++# Don't directly open a file because we may want to ++# incorporate the script contents within a cygwin/mingw ++# wrapper executable. Must ONLY be called from within ++# func_mode_link because it depends on a number of variables ++# set therein. ++# ++# ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR ++# variable will take. If 'yes', then the emitted script ++# will assume that the directory in which it is stored is ++# the $objdir directory. This is a cygwin/mingw-specific ++# behavior. ++func_emit_wrapper () ++{ ++ func_emit_wrapper_arg1=${1-no} ++ ++ $ECHO "\ ++#! $SHELL ++ ++# $output - temporary wrapper script for $objdir/$outputname ++# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION ++# ++# The $output program cannot be directly executed until all the libtool ++# libraries that it depends on are installed. ++# ++# This wrapper script should never be moved out of the build directory. ++# If it is, it will not operate correctly. ++ ++# Sed substitution that helps us do robust quoting. It backslashifies ++# metacharacters that are still active within double-quoted strings. 
++sed_quote_subst='$sed_quote_subst' ++ ++# Be Bourne compatible ++if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then ++ emulate sh ++ NULLCMD=: ++ # Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which ++ # is contrary to our usage. Disable this feature. ++ alias -g '\${1+\"\$@\"}'='\"\$@\"' ++ setopt NO_GLOB_SUBST ++else ++ case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac ++fi ++BIN_SH=xpg4; export BIN_SH # for Tru64 ++DUALCASE=1; export DUALCASE # for MKS sh ++ ++# The HP-UX ksh and POSIX shell print the target directory to stdout ++# if CDPATH is set. ++(unset CDPATH) >/dev/null 2>&1 && unset CDPATH ++ ++relink_command=\"$relink_command\" ++ ++# This environment variable determines our operation mode. ++if test \"\$libtool_install_magic\" = \"$magic\"; then ++ # install mode needs the following variables: ++ generated_by_libtool_version='$macro_version' ++ notinst_deplibs='$notinst_deplibs' ++else ++ # When we are sourced in execute mode, \$file and \$ECHO are already set. ++ if test \"\$libtool_execute_magic\" != \"$magic\"; then ++ file=\"\$0\"" ++ ++ qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"` ++ $ECHO "\ ++ ++# A function that is used when there is no print builtin or printf. ++func_fallback_echo () ++{ ++ eval 'cat <<_LTECHO_EOF ++\$1 ++_LTECHO_EOF' ++} ++ ECHO=\"$qECHO\" ++ fi\ ++ ++ # Find the directory that this script lives in. ++ thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\` ++ test \"x\$thisdir\" = \"x\$file\" && thisdir=. ++ ++ # Follow symbolic links until we get to the real thisdir. ++ file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\` ++ while test -n \"\$file\"; do ++ destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\` ++ ++ # If there was a directory component, then change thisdir. 
++ if test \"x\$destdir\" != \"x\$file\"; then ++ case \"\$destdir\" in ++ [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;; ++ *) thisdir=\"\$thisdir/\$destdir\" ;; ++ esac ++ fi ++ ++ file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\` ++ file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\` ++ done ++ ++ # Usually 'no', except on cygwin/mingw when embedded into ++ # the cwrapper. ++ WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1 ++ if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then ++ # special case for '.' ++ if test \"\$thisdir\" = \".\"; then ++ thisdir=\`pwd\` ++ fi ++ # remove .libs from thisdir ++ case \"\$thisdir\" in ++ *[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;; ++ $objdir ) thisdir=. ;; ++ esac ++ fi ++ ++ # Try to get the absolute directory name. ++ absdir=\`cd \"\$thisdir\" && pwd\` ++ test -n \"\$absdir\" && thisdir=\"\$absdir\" ++" ++ ++ if test "$fast_install" = yes; then ++ $ECHO "\ ++ program=lt-'$outputname'$exeext ++ progdir=\"\$thisdir/$objdir\" ++ ++ if test ! -f \"\$progdir/\$program\" || ++ { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\ ++ test \"X\$file\" != \"X\$progdir/\$program\"; }; then ++ ++ file=\"\$\$-\$program\" ++ ++ if test ! 
-d \"\$progdir\"; then ++ $MKDIR \"\$progdir\" ++ else ++ $RM \"\$progdir/\$file\" ++ fi" ++ ++ $ECHO "\ ++ ++ # relink executable if necessary ++ if test -n \"\$relink_command\"; then ++ if relink_command_output=\`eval \"\$relink_command\" 2>&1\`; then : ++ else ++ $ECHO \"\$relink_command_output\" >&2 ++ $RM \"\$progdir/\$file\" ++ exit 1 ++ fi ++ fi ++ ++ $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null || ++ { $RM \"\$progdir/\$program\"; ++ $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; } ++ $RM \"\$progdir/\$file\" ++ fi" ++ else ++ $ECHO "\ ++ program='$outputname' ++ progdir=\"\$thisdir/$objdir\" ++" ++ fi ++ ++ $ECHO "\ ++ ++ if test -f \"\$progdir/\$program\"; then" ++ ++ # Export our shlibpath_var if we have one. ++ if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then ++ $ECHO "\ ++ # Add our own library path to $shlibpath_var ++ $shlibpath_var=\"$temp_rpath\$$shlibpath_var\" ++ ++ # Some systems cannot cope with colon-terminated $shlibpath_var ++ # The second colon is a workaround for a bug in BeOS R4 sed ++ $shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\` ++ ++ export $shlibpath_var ++" ++ fi ++ ++ # fixup the dll searchpath if we need to. ++ if test -n "$dllsearchpath"; then ++ $ECHO "\ ++ # Add the dll search path components to the executable PATH ++ PATH=$dllsearchpath:\$PATH ++" ++ fi ++ ++ $ECHO "\ ++ if test \"\$libtool_execute_magic\" != \"$magic\"; then ++ # Run the actual program with our arguments. ++" ++ case $host in ++ # Backslashes separate directories on plain windows ++ *-*-mingw | *-*-os2* | *-cegcc*) ++ $ECHO "\ ++ exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} ++" ++ ;; ++ ++ *) ++ $ECHO "\ ++ exec \"\$progdir/\$program\" \${1+\"\$@\"} ++" ++ ;; ++ esac ++ $ECHO "\ ++ \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2 ++ exit 1 ++ fi ++ else ++ # The program doesn't exist. 
++ \$ECHO \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2 ++ \$ECHO \"This script is just a wrapper for \$program.\" 1>&2 ++ \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2 ++ exit 1 ++ fi ++fi\ ++" ++} ++ ++ ++# func_to_host_path arg ++# ++# Convert paths to host format when used with build tools. ++# Intended for use with "native" mingw (where libtool itself ++# is running under the msys shell), or in the following cross- ++# build environments: ++# $build $host ++# mingw (msys) mingw [e.g. native] ++# cygwin mingw ++# *nix + wine mingw ++# where wine is equipped with the `winepath' executable. ++# In the native mingw case, the (msys) shell automatically ++# converts paths for any non-msys applications it launches, ++# but that facility isn't available from inside the cwrapper. ++# Similar accommodations are necessary for $host mingw and ++# $build cygwin. Calling this function does no harm for other ++# $host/$build combinations not listed above. ++# ++# ARG is the path (on $build) that should be converted to ++# the proper representation for $host. The result is stored ++# in $func_to_host_path_result. ++func_to_host_path () ++{ ++ func_to_host_path_result="$1" ++ if test -n "$1"; then ++ case $host in ++ *mingw* ) ++ lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' ++ case $build in ++ *mingw* ) # actually, msys ++ # awkward: cmd appends spaces to result ++ func_to_host_path_result=`( cmd //c echo "$1" ) 2>/dev/null | ++ $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` ++ ;; ++ *cygwin* ) ++ func_to_host_path_result=`cygpath -w "$1" | ++ $SED -e "$lt_sed_naive_backslashify"` ++ ;; ++ * ) ++ # Unfortunately, winepath does not exit with a non-zero ++ # error code, so we are forced to check the contents of ++ # stdout. On the other hand, if the command is not ++ # found, the shell will set an exit code of 127 and print ++ # *an error message* to stdout. 
So we must check for both ++ # error code of zero AND non-empty stdout, which explains ++ # the odd construction: ++ func_to_host_path_tmp1=`winepath -w "$1" 2>/dev/null` ++ if test "$?" -eq 0 && test -n "${func_to_host_path_tmp1}"; then ++ func_to_host_path_result=`$ECHO "$func_to_host_path_tmp1" | ++ $SED -e "$lt_sed_naive_backslashify"` ++ else ++ # Allow warning below. ++ func_to_host_path_result= ++ fi ++ ;; ++ esac ++ if test -z "$func_to_host_path_result" ; then ++ func_error "Could not determine host path corresponding to" ++ func_error " \`$1'" ++ func_error "Continuing, but uninstalled executables may not work." ++ # Fallback: ++ func_to_host_path_result="$1" ++ fi ++ ;; ++ esac ++ fi ++} ++# end: func_to_host_path ++ ++# func_to_host_pathlist arg ++# ++# Convert pathlists to host format when used with build tools. ++# See func_to_host_path(), above. This function supports the ++# following $build/$host combinations (but does no harm for ++# combinations not listed here): ++# $build $host ++# mingw (msys) mingw [e.g. native] ++# cygwin mingw ++# *nix + wine mingw ++# ++# Path separators are also converted from $build format to ++# $host format. If ARG begins or ends with a path separator ++# character, it is preserved (but converted to $host format) ++# on output. ++# ++# ARG is a pathlist (on $build) that should be converted to ++# the proper representation on $host. The result is stored ++# in $func_to_host_pathlist_result. ++func_to_host_pathlist () ++{ ++ func_to_host_pathlist_result="$1" ++ if test -n "$1"; then ++ case $host in ++ *mingw* ) ++ lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' ++ # Remove leading and trailing path separator characters from ++ # ARG. msys behavior is inconsistent here, cygpath turns them ++ # into '.;' and ';.', and winepath ignores them completely. ++ func_stripname : : "$1" ++ func_to_host_pathlist_tmp1=$func_stripname_result ++ case $build in ++ *mingw* ) # Actually, msys. 
++ # Awkward: cmd appends spaces to result. ++ func_to_host_pathlist_result=` ++ ( cmd //c echo "$func_to_host_pathlist_tmp1" ) 2>/dev/null | ++ $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` ++ ;; ++ *cygwin* ) ++ func_to_host_pathlist_result=`cygpath -w -p "$func_to_host_pathlist_tmp1" | ++ $SED -e "$lt_sed_naive_backslashify"` ++ ;; ++ * ) ++ # unfortunately, winepath doesn't convert pathlists ++ func_to_host_pathlist_result="" ++ func_to_host_pathlist_oldIFS=$IFS ++ IFS=: ++ for func_to_host_pathlist_f in $func_to_host_pathlist_tmp1 ; do ++ IFS=$func_to_host_pathlist_oldIFS ++ if test -n "$func_to_host_pathlist_f" ; then ++ func_to_host_path "$func_to_host_pathlist_f" ++ if test -n "$func_to_host_path_result" ; then ++ if test -z "$func_to_host_pathlist_result" ; then ++ func_to_host_pathlist_result="$func_to_host_path_result" ++ else ++ func_append func_to_host_pathlist_result ";$func_to_host_path_result" ++ fi ++ fi ++ fi ++ done ++ IFS=$func_to_host_pathlist_oldIFS ++ ;; ++ esac ++ if test -z "$func_to_host_pathlist_result"; then ++ func_error "Could not determine the host path(s) corresponding to" ++ func_error " \`$1'" ++ func_error "Continuing, but uninstalled executables may not work." ++ # Fallback. This may break if $1 contains DOS-style drive ++ # specifications. The fix is not to complicate the expression ++ # below, but for the user to provide a working wine installation ++ # with winepath so that path translation in the cross-to-mingw ++ # case works properly. 
++ lt_replace_pathsep_nix_to_dos="s|:|;|g" ++ func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp1" |\ ++ $SED -e "$lt_replace_pathsep_nix_to_dos"` ++ fi ++ # Now, add the leading and trailing path separators back ++ case "$1" in ++ :* ) func_to_host_pathlist_result=";$func_to_host_pathlist_result" ++ ;; ++ esac ++ case "$1" in ++ *: ) func_append func_to_host_pathlist_result ";" ++ ;; ++ esac ++ ;; ++ esac ++ fi ++} ++# end: func_to_host_pathlist ++ ++# func_emit_cwrapperexe_src ++# emit the source code for a wrapper executable on stdout ++# Must ONLY be called from within func_mode_link because ++# it depends on a number of variable set therein. ++func_emit_cwrapperexe_src () ++{ ++ cat < ++#include ++#ifdef _MSC_VER ++# include ++# include ++# include ++#else ++# include ++# include ++# ifdef __CYGWIN__ ++# include ++# endif ++#endif ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++ ++/* declarations of non-ANSI functions */ ++#if defined(__MINGW32__) ++# ifdef __STRICT_ANSI__ ++int _putenv (const char *); ++# endif ++#elif defined(__CYGWIN__) ++# ifdef __STRICT_ANSI__ ++char *realpath (const char *, char *); ++int putenv (char *); ++int setenv (const char *, const char *, int); ++# endif ++/* #elif defined (other platforms) ... */ ++#endif ++ ++/* portability defines, excluding path handling macros */ ++#if defined(_MSC_VER) ++# define setmode _setmode ++# define stat _stat ++# define chmod _chmod ++# define getcwd _getcwd ++# define putenv _putenv ++# define S_IXUSR _S_IEXEC ++# ifndef _INTPTR_T_DEFINED ++# define _INTPTR_T_DEFINED ++# define intptr_t int ++# endif ++#elif defined(__MINGW32__) ++# define setmode _setmode ++# define stat _stat ++# define chmod _chmod ++# define getcwd _getcwd ++# define putenv _putenv ++#elif defined(__CYGWIN__) ++# define HAVE_SETENV ++# define FOPEN_WB "wb" ++/* #elif defined (other platforms) ... 
*/ ++#endif ++ ++#if defined(PATH_MAX) ++# define LT_PATHMAX PATH_MAX ++#elif defined(MAXPATHLEN) ++# define LT_PATHMAX MAXPATHLEN ++#else ++# define LT_PATHMAX 1024 ++#endif ++ ++#ifndef S_IXOTH ++# define S_IXOTH 0 ++#endif ++#ifndef S_IXGRP ++# define S_IXGRP 0 ++#endif ++ ++/* path handling portability macros */ ++#ifndef DIR_SEPARATOR ++# define DIR_SEPARATOR '/' ++# define PATH_SEPARATOR ':' ++#endif ++ ++#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \ ++ defined (__OS2__) ++# define HAVE_DOS_BASED_FILE_SYSTEM ++# define FOPEN_WB "wb" ++# ifndef DIR_SEPARATOR_2 ++# define DIR_SEPARATOR_2 '\\' ++# endif ++# ifndef PATH_SEPARATOR_2 ++# define PATH_SEPARATOR_2 ';' ++# endif ++#endif ++ ++#ifndef DIR_SEPARATOR_2 ++# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR) ++#else /* DIR_SEPARATOR_2 */ ++# define IS_DIR_SEPARATOR(ch) \ ++ (((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2)) ++#endif /* DIR_SEPARATOR_2 */ ++ ++#ifndef PATH_SEPARATOR_2 ++# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR) ++#else /* PATH_SEPARATOR_2 */ ++# define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2) ++#endif /* PATH_SEPARATOR_2 */ ++ ++#ifndef FOPEN_WB ++# define FOPEN_WB "w" ++#endif ++#ifndef _O_BINARY ++# define _O_BINARY 0 ++#endif ++ ++#define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type))) ++#define XFREE(stale) do { \ ++ if (stale) { free ((void *) stale); stale = 0; } \ ++} while (0) ++ ++#undef LTWRAPPER_DEBUGPRINTF ++#if defined LT_DEBUGWRAPPER ++# define LTWRAPPER_DEBUGPRINTF(args) ltwrapper_debugprintf args ++static void ++ltwrapper_debugprintf (const char *fmt, ...) 
++{ ++ va_list args; ++ va_start (args, fmt); ++ (void) vfprintf (stderr, fmt, args); ++ va_end (args); ++} ++#else ++# define LTWRAPPER_DEBUGPRINTF(args) ++#endif ++ ++const char *program_name = NULL; ++ ++void *xmalloc (size_t num); ++char *xstrdup (const char *string); ++const char *base_name (const char *name); ++char *find_executable (const char *wrapper); ++char *chase_symlinks (const char *pathspec); ++int make_executable (const char *path); ++int check_executable (const char *path); ++char *strendzap (char *str, const char *pat); ++void lt_fatal (const char *message, ...); ++void lt_setenv (const char *name, const char *value); ++char *lt_extend_str (const char *orig_value, const char *add, int to_end); ++void lt_update_exe_path (const char *name, const char *value); ++void lt_update_lib_path (const char *name, const char *value); ++char **prepare_spawn (char **argv); ++void lt_dump_script (FILE *f); ++EOF ++ ++ cat <"))); ++ for (i = 0; i < newargc; i++) ++ { ++ LTWRAPPER_DEBUGPRINTF (("(main) newargz[%d] : %s\n", i, (newargz[i] ? newargz[i] : ""))); ++ } ++ ++EOF ++ ++ case $host_os in ++ mingw*) ++ cat <<"EOF" ++ /* execv doesn't actually work on mingw as expected on unix */ ++ newargz = prepare_spawn (newargz); ++ rval = _spawnv (_P_WAIT, lt_argv_zero, (const char * const *) newargz); ++ if (rval == -1) ++ { ++ /* failed to start process */ ++ LTWRAPPER_DEBUGPRINTF (("(main) failed to launch target \"%s\": errno = %d\n", lt_argv_zero, errno)); ++ return 127; ++ } ++ return rval; ++EOF ++ ;; ++ *) ++ cat <<"EOF" ++ execv (lt_argv_zero, newargz); ++ return rval; /* =127, but avoids unused variable warning */ ++EOF ++ ;; ++ esac ++ ++ cat <<"EOF" ++} ++ ++void * ++xmalloc (size_t num) ++{ ++ void *p = (void *) malloc (num); ++ if (!p) ++ lt_fatal ("Memory exhausted"); ++ ++ return p; ++} ++ ++char * ++xstrdup (const char *string) ++{ ++ return string ? 
strcpy ((char *) xmalloc (strlen (string) + 1), ++ string) : NULL; ++} ++ ++const char * ++base_name (const char *name) ++{ ++ const char *base; ++ ++#if defined (HAVE_DOS_BASED_FILE_SYSTEM) ++ /* Skip over the disk name in MSDOS pathnames. */ ++ if (isalpha ((unsigned char) name[0]) && name[1] == ':') ++ name += 2; ++#endif ++ ++ for (base = name; *name; name++) ++ if (IS_DIR_SEPARATOR (*name)) ++ base = name + 1; ++ return base; ++} ++ ++int ++check_executable (const char *path) ++{ ++ struct stat st; ++ ++ LTWRAPPER_DEBUGPRINTF (("(check_executable) : %s\n", ++ path ? (*path ? path : "EMPTY!") : "NULL!")); ++ if ((!path) || (!*path)) ++ return 0; ++ ++ if ((stat (path, &st) >= 0) ++ && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH))) ++ return 1; ++ else ++ return 0; ++} ++ ++int ++make_executable (const char *path) ++{ ++ int rval = 0; ++ struct stat st; ++ ++ LTWRAPPER_DEBUGPRINTF (("(make_executable) : %s\n", ++ path ? (*path ? path : "EMPTY!") : "NULL!")); ++ if ((!path) || (!*path)) ++ return 0; ++ ++ if (stat (path, &st) >= 0) ++ { ++ rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR); ++ } ++ return rval; ++} ++ ++/* Searches for the full path of the wrapper. Returns ++ newly allocated full path name if found, NULL otherwise ++ Does not chase symlinks, even on platforms that support them. ++*/ ++char * ++find_executable (const char *wrapper) ++{ ++ int has_slash = 0; ++ const char *p; ++ const char *p_next; ++ /* static buffer for getcwd */ ++ char tmp[LT_PATHMAX + 1]; ++ int tmp_len; ++ char *concat_name; ++ ++ LTWRAPPER_DEBUGPRINTF (("(find_executable) : %s\n", ++ wrapper ? (*wrapper ? wrapper : "EMPTY!") : "NULL!")); ++ ++ if ((wrapper == NULL) || (*wrapper == '\0')) ++ return NULL; ++ ++ /* Absolute path? 
*/ ++#if defined (HAVE_DOS_BASED_FILE_SYSTEM) ++ if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':') ++ { ++ concat_name = xstrdup (wrapper); ++ if (check_executable (concat_name)) ++ return concat_name; ++ XFREE (concat_name); ++ } ++ else ++ { ++#endif ++ if (IS_DIR_SEPARATOR (wrapper[0])) ++ { ++ concat_name = xstrdup (wrapper); ++ if (check_executable (concat_name)) ++ return concat_name; ++ XFREE (concat_name); ++ } ++#if defined (HAVE_DOS_BASED_FILE_SYSTEM) ++ } ++#endif ++ ++ for (p = wrapper; *p; p++) ++ if (*p == '/') ++ { ++ has_slash = 1; ++ break; ++ } ++ if (!has_slash) ++ { ++ /* no slashes; search PATH */ ++ const char *path = getenv ("PATH"); ++ if (path != NULL) ++ { ++ for (p = path; *p; p = p_next) ++ { ++ const char *q; ++ size_t p_len; ++ for (q = p; *q; q++) ++ if (IS_PATH_SEPARATOR (*q)) ++ break; ++ p_len = q - p; ++ p_next = (*q == '\0' ? q : q + 1); ++ if (p_len == 0) ++ { ++ /* empty path: current directory */ ++ if (getcwd (tmp, LT_PATHMAX) == NULL) ++ lt_fatal ("getcwd failed"); ++ tmp_len = strlen (tmp); ++ concat_name = ++ XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); ++ memcpy (concat_name, tmp, tmp_len); ++ concat_name[tmp_len] = '/'; ++ strcpy (concat_name + tmp_len + 1, wrapper); ++ } ++ else ++ { ++ concat_name = ++ XMALLOC (char, p_len + 1 + strlen (wrapper) + 1); ++ memcpy (concat_name, p, p_len); ++ concat_name[p_len] = '/'; ++ strcpy (concat_name + p_len + 1, wrapper); ++ } ++ if (check_executable (concat_name)) ++ return concat_name; ++ XFREE (concat_name); ++ } ++ } ++ /* not found in PATH; assume curdir */ ++ } ++ /* Relative path | not found in path: prepend cwd */ ++ if (getcwd (tmp, LT_PATHMAX) == NULL) ++ lt_fatal ("getcwd failed"); ++ tmp_len = strlen (tmp); ++ concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); ++ memcpy (concat_name, tmp, tmp_len); ++ concat_name[tmp_len] = '/'; ++ strcpy (concat_name + tmp_len + 1, wrapper); ++ ++ if (check_executable (concat_name)) ++ return 
concat_name; ++ XFREE (concat_name); ++ return NULL; ++} ++ ++char * ++chase_symlinks (const char *pathspec) ++{ ++#ifndef S_ISLNK ++ return xstrdup (pathspec); ++#else ++ char buf[LT_PATHMAX]; ++ struct stat s; ++ char *tmp_pathspec = xstrdup (pathspec); ++ char *p; ++ int has_symlinks = 0; ++ while (strlen (tmp_pathspec) && !has_symlinks) ++ { ++ LTWRAPPER_DEBUGPRINTF (("checking path component for symlinks: %s\n", ++ tmp_pathspec)); ++ if (lstat (tmp_pathspec, &s) == 0) ++ { ++ if (S_ISLNK (s.st_mode) != 0) ++ { ++ has_symlinks = 1; ++ break; ++ } ++ ++ /* search backwards for last DIR_SEPARATOR */ ++ p = tmp_pathspec + strlen (tmp_pathspec) - 1; ++ while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p))) ++ p--; ++ if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p))) ++ { ++ /* no more DIR_SEPARATORS left */ ++ break; ++ } ++ *p = '\0'; ++ } ++ else ++ { ++ char *errstr = strerror (errno); ++ lt_fatal ("Error accessing file %s (%s)", tmp_pathspec, errstr); ++ } ++ } ++ XFREE (tmp_pathspec); ++ ++ if (!has_symlinks) ++ { ++ return xstrdup (pathspec); ++ } ++ ++ tmp_pathspec = realpath (pathspec, buf); ++ if (tmp_pathspec == 0) ++ { ++ lt_fatal ("Could not follow symlinks for %s", pathspec); ++ } ++ return xstrdup (tmp_pathspec); ++#endif ++} ++ ++char * ++strendzap (char *str, const char *pat) ++{ ++ size_t len, patlen; ++ ++ assert (str != NULL); ++ assert (pat != NULL); ++ ++ len = strlen (str); ++ patlen = strlen (pat); ++ ++ if (patlen <= len) ++ { ++ str += len - patlen; ++ if (strcmp (str, pat) == 0) ++ *str = '\0'; ++ } ++ return str; ++} ++ ++static void ++lt_error_core (int exit_status, const char *mode, ++ const char *message, va_list ap) ++{ ++ fprintf (stderr, "%s: %s: ", program_name, mode); ++ vfprintf (stderr, message, ap); ++ fprintf (stderr, ".\n"); ++ ++ if (exit_status >= 0) ++ exit (exit_status); ++} ++ ++void ++lt_fatal (const char *message, ...) 
++{ ++ va_list ap; ++ va_start (ap, message); ++ lt_error_core (EXIT_FAILURE, "FATAL", message, ap); ++ va_end (ap); ++} ++ ++void ++lt_setenv (const char *name, const char *value) ++{ ++ LTWRAPPER_DEBUGPRINTF (("(lt_setenv) setting '%s' to '%s'\n", ++ (name ? name : ""), ++ (value ? value : ""))); ++ { ++#ifdef HAVE_SETENV ++ /* always make a copy, for consistency with !HAVE_SETENV */ ++ char *str = xstrdup (value); ++ setenv (name, str, 1); ++#else ++ int len = strlen (name) + 1 + strlen (value) + 1; ++ char *str = XMALLOC (char, len); ++ sprintf (str, "%s=%s", name, value); ++ if (putenv (str) != EXIT_SUCCESS) ++ { ++ XFREE (str); ++ } ++#endif ++ } ++} ++ ++char * ++lt_extend_str (const char *orig_value, const char *add, int to_end) ++{ ++ char *new_value; ++ if (orig_value && *orig_value) ++ { ++ int orig_value_len = strlen (orig_value); ++ int add_len = strlen (add); ++ new_value = XMALLOC (char, add_len + orig_value_len + 1); ++ if (to_end) ++ { ++ strcpy (new_value, orig_value); ++ strcpy (new_value + orig_value_len, add); ++ } ++ else ++ { ++ strcpy (new_value, add); ++ strcpy (new_value + add_len, orig_value); ++ } ++ } ++ else ++ { ++ new_value = xstrdup (add); ++ } ++ return new_value; ++} ++ ++void ++lt_update_exe_path (const char *name, const char *value) ++{ ++ LTWRAPPER_DEBUGPRINTF (("(lt_update_exe_path) modifying '%s' by prepending '%s'\n", ++ (name ? name : ""), ++ (value ? value : ""))); ++ ++ if (name && *name && value && *value) ++ { ++ char *new_value = lt_extend_str (getenv (name), value, 0); ++ /* some systems can't cope with a ':'-terminated path #' */ ++ int len = strlen (new_value); ++ while (((len = strlen (new_value)) > 0) && IS_PATH_SEPARATOR (new_value[len-1])) ++ { ++ new_value[len-1] = '\0'; ++ } ++ lt_setenv (name, new_value); ++ XFREE (new_value); ++ } ++} ++ ++void ++lt_update_lib_path (const char *name, const char *value) ++{ ++ LTWRAPPER_DEBUGPRINTF (("(lt_update_lib_path) modifying '%s' by prepending '%s'\n", ++ (name ? 
name : ""), ++ (value ? value : ""))); ++ ++ if (name && *name && value && *value) ++ { ++ char *new_value = lt_extend_str (getenv (name), value, 0); ++ lt_setenv (name, new_value); ++ XFREE (new_value); ++ } ++} ++ ++EOF ++ case $host_os in ++ mingw*) ++ cat <<"EOF" ++ ++/* Prepares an argument vector before calling spawn(). ++ Note that spawn() does not by itself call the command interpreter ++ (getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") : ++ ({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO); ++ GetVersionEx(&v); ++ v.dwPlatformId == VER_PLATFORM_WIN32_NT; ++ }) ? "cmd.exe" : "command.com"). ++ Instead it simply concatenates the arguments, separated by ' ', and calls ++ CreateProcess(). We must quote the arguments since Win32 CreateProcess() ++ interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a ++ special way: ++ - Space and tab are interpreted as delimiters. They are not treated as ++ delimiters if they are surrounded by double quotes: "...". ++ - Unescaped double quotes are removed from the input. Their only effect is ++ that within double quotes, space and tab are treated like normal ++ characters. ++ - Backslashes not followed by double quotes are not special. ++ - But 2*n+1 backslashes followed by a double quote become ++ n backslashes followed by a double quote (n >= 0): ++ \" -> " ++ \\\" -> \" ++ \\\\\" -> \\" ++ */ ++#define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037" ++#define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037" ++char ** ++prepare_spawn (char **argv) ++{ ++ size_t argc; ++ char **new_argv; ++ size_t i; ++ ++ /* Count number of arguments. */ ++ for (argc = 0; argv[argc] != NULL; argc++) ++ ; ++ ++ /* Allocate new argument vector. 
*/ ++ new_argv = XMALLOC (char *, argc + 1); ++ ++ /* Put quoted arguments into the new argument vector. */ ++ for (i = 0; i < argc; i++) ++ { ++ const char *string = argv[i]; ++ ++ if (string[0] == '\0') ++ new_argv[i] = xstrdup ("\"\""); ++ else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL) ++ { ++ int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL); ++ size_t length; ++ unsigned int backslashes; ++ const char *s; ++ char *quoted_string; ++ char *p; ++ ++ length = 0; ++ backslashes = 0; ++ if (quote_around) ++ length++; ++ for (s = string; *s != '\0'; s++) ++ { ++ char c = *s; ++ if (c == '"') ++ length += backslashes + 1; ++ length++; ++ if (c == '\\') ++ backslashes++; ++ else ++ backslashes = 0; ++ } ++ if (quote_around) ++ length += backslashes + 1; ++ ++ quoted_string = XMALLOC (char, length + 1); ++ ++ p = quoted_string; ++ backslashes = 0; ++ if (quote_around) ++ *p++ = '"'; ++ for (s = string; *s != '\0'; s++) ++ { ++ char c = *s; ++ if (c == '"') ++ { ++ unsigned int j; ++ for (j = backslashes + 1; j > 0; j--) ++ *p++ = '\\'; ++ } ++ *p++ = c; ++ if (c == '\\') ++ backslashes++; ++ else ++ backslashes = 0; ++ } ++ if (quote_around) ++ { ++ unsigned int j; ++ for (j = backslashes; j > 0; j--) ++ *p++ = '\\'; ++ *p++ = '"'; ++ } ++ *p = '\0'; ++ ++ new_argv[i] = quoted_string; ++ } ++ else ++ new_argv[i] = (char *) string; ++ } ++ new_argv[argc] = NULL; ++ ++ return new_argv; ++} ++EOF ++ ;; ++ esac ++ ++ cat <<"EOF" ++void lt_dump_script (FILE* f) ++{ ++EOF ++ func_emit_wrapper yes | ++ $SED -e 's/\([\\"]\)/\\\1/g' \ ++ -e 's/^/ fputs ("/' -e 's/$/\\n", f);/' ++ ++ cat <<"EOF" ++} ++EOF ++} ++# end: func_emit_cwrapperexe_src ++ ++# func_win32_import_lib_p ARG ++# True if ARG is an import lib, as indicated by $file_magic_cmd ++func_win32_import_lib_p () ++{ ++ $opt_debug ++ case `eval "$file_magic_cmd \"\$1\" 2>/dev/null" | $SED -e 10q` in ++ *import*) : ;; ++ *) false ;; ++ esac ++} ++ ++# func_mode_link arg... 
++func_mode_link () ++{ ++ $opt_debug ++ case $host in ++ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) ++ # It is impossible to link a dll without this setting, and ++ # we shouldn't force the makefile maintainer to figure out ++ # which system we are compiling for in order to pass an extra ++ # flag for every libtool invocation. ++ # allow_undefined=no ++ ++ # FIXME: Unfortunately, there are problems with the above when trying ++ # to make a dll which has undefined symbols, in which case not ++ # even a static library is built. For now, we need to specify ++ # -no-undefined on the libtool link line when we can be certain ++ # that all symbols are satisfied, otherwise we get a static library. ++ allow_undefined=yes ++ ;; ++ *) ++ allow_undefined=yes ++ ;; ++ esac ++ libtool_args=$nonopt ++ base_compile="$nonopt $@" ++ compile_command=$nonopt ++ finalize_command=$nonopt ++ ++ compile_rpath= ++ finalize_rpath= ++ compile_shlibpath= ++ finalize_shlibpath= ++ convenience= ++ old_convenience= ++ deplibs= ++ old_deplibs= ++ compiler_flags= ++ linker_flags= ++ dllsearchpath= ++ lib_search_path=`pwd` ++ inst_prefix_dir= ++ new_inherited_linker_flags= ++ ++ avoid_version=no ++ bindir= ++ dlfiles= ++ dlprefiles= ++ dlself=no ++ export_dynamic=no ++ export_symbols= ++ export_symbols_regex= ++ generated= ++ libobjs= ++ ltlibs= ++ module=no ++ no_install=no ++ objs= ++ non_pic_objects= ++ precious_files_regex= ++ prefer_static_libs=no ++ preload=no ++ prev= ++ prevarg= ++ release= ++ rpath= ++ xrpath= ++ perm_rpath= ++ temp_rpath= ++ thread_safe=no ++ vinfo= ++ vinfo_number=no ++ weak_libs= ++ single_module="${wl}-single_module" ++ func_infer_tag $base_compile ++ ++ # We need to know -static, to get the right output filenames. 
++ for arg ++ do ++ case $arg in ++ -shared) ++ test "$build_libtool_libs" != yes && \ ++ func_fatal_configuration "can not build a shared library" ++ build_old_libs=no ++ break ++ ;; ++ -all-static | -static | -static-libtool-libs) ++ case $arg in ++ -all-static) ++ if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then ++ func_warning "complete static linking is impossible in this configuration" ++ fi ++ if test -n "$link_static_flag"; then ++ dlopen_self=$dlopen_self_static ++ fi ++ prefer_static_libs=yes ++ ;; ++ -static) ++ if test -z "$pic_flag" && test -n "$link_static_flag"; then ++ dlopen_self=$dlopen_self_static ++ fi ++ prefer_static_libs=built ++ ;; ++ -static-libtool-libs) ++ if test -z "$pic_flag" && test -n "$link_static_flag"; then ++ dlopen_self=$dlopen_self_static ++ fi ++ prefer_static_libs=yes ++ ;; ++ esac ++ build_libtool_libs=no ++ build_old_libs=yes ++ break ++ ;; ++ esac ++ done ++ ++ # See if our shared archives depend on static archives. ++ test -n "$old_archive_from_new_cmds" && build_old_libs=yes ++ ++ # Go through the arguments, transforming them on the way. ++ while test "$#" -gt 0; do ++ arg="$1" ++ shift ++ func_quote_for_eval "$arg" ++ qarg=$func_quote_for_eval_unquoted_result ++ func_append libtool_args " $func_quote_for_eval_result" ++ ++ # If the previous option needs an argument, assign it. ++ if test -n "$prev"; then ++ case $prev in ++ output) ++ func_append compile_command " @OUTPUT@" ++ func_append finalize_command " @OUTPUT@" ++ ;; ++ esac ++ ++ case $prev in ++ bindir) ++ bindir="$arg" ++ prev= ++ continue ++ ;; ++ dlfiles|dlprefiles) ++ if test "$preload" = no; then ++ # Add the symbol object into the linking commands. ++ func_append compile_command " @SYMFILE@" ++ func_append finalize_command " @SYMFILE@" ++ preload=yes ++ fi ++ case $arg in ++ *.la | *.lo) ;; # We handle these cases below. 
++ force) ++ if test "$dlself" = no; then ++ dlself=needless ++ export_dynamic=yes ++ fi ++ prev= ++ continue ++ ;; ++ self) ++ if test "$prev" = dlprefiles; then ++ dlself=yes ++ elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then ++ dlself=yes ++ else ++ dlself=needless ++ export_dynamic=yes ++ fi ++ prev= ++ continue ++ ;; ++ *) ++ if test "$prev" = dlfiles; then ++ dlfiles="$dlfiles $arg" ++ else ++ dlprefiles="$dlprefiles $arg" ++ fi ++ prev= ++ continue ++ ;; ++ esac ++ ;; ++ expsyms) ++ export_symbols="$arg" ++ test -f "$arg" \ ++ || func_fatal_error "symbol file \`$arg' does not exist" ++ prev= ++ continue ++ ;; ++ expsyms_regex) ++ export_symbols_regex="$arg" ++ prev= ++ continue ++ ;; ++ framework) ++ case $host in ++ *-*-darwin*) ++ case "$deplibs " in ++ *" $qarg.ltframework "*) ;; ++ *) deplibs="$deplibs $qarg.ltframework" # this is fixed later ++ ;; ++ esac ++ ;; ++ esac ++ prev= ++ continue ++ ;; ++ inst_prefix) ++ inst_prefix_dir="$arg" ++ prev= ++ continue ++ ;; ++ objectlist) ++ if test -f "$arg"; then ++ save_arg=$arg ++ moreargs= ++ for fil in `cat "$save_arg"` ++ do ++# moreargs="$moreargs $fil" ++ arg=$fil ++ # A libtool-controlled object. ++ ++ # Check to see that this really is a libtool object. ++ if func_lalib_unsafe_p "$arg"; then ++ pic_object= ++ non_pic_object= ++ ++ # Read the .lo file ++ func_source "$arg" ++ ++ if test -z "$pic_object" || ++ test -z "$non_pic_object" || ++ test "$pic_object" = none && ++ test "$non_pic_object" = none; then ++ func_fatal_error "cannot find name of object for \`$arg'" ++ fi ++ ++ # Extract subdirectory from the argument. ++ func_dirname "$arg" "/" "" ++ xdir="$func_dirname_result" ++ ++ if test "$pic_object" != none; then ++ # Prepend the subdirectory the object is found in. 
++ pic_object="$xdir$pic_object" ++ ++ if test "$prev" = dlfiles; then ++ if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then ++ dlfiles="$dlfiles $pic_object" ++ prev= ++ continue ++ else ++ # If libtool objects are unsupported, then we need to preload. ++ prev=dlprefiles ++ fi ++ fi ++ ++ # CHECK ME: I think I busted this. -Ossama ++ if test "$prev" = dlprefiles; then ++ # Preload the old-style object. ++ dlprefiles="$dlprefiles $pic_object" ++ prev= ++ fi ++ ++ # A PIC object. ++ func_append libobjs " $pic_object" ++ arg="$pic_object" ++ fi ++ ++ # Non-PIC object. ++ if test "$non_pic_object" != none; then ++ # Prepend the subdirectory the object is found in. ++ non_pic_object="$xdir$non_pic_object" ++ ++ # A standard non-PIC object ++ func_append non_pic_objects " $non_pic_object" ++ if test -z "$pic_object" || test "$pic_object" = none ; then ++ arg="$non_pic_object" ++ fi ++ else ++ # If the PIC object exists, use it instead. ++ # $xdir was prepended to $pic_object above. ++ non_pic_object="$pic_object" ++ func_append non_pic_objects " $non_pic_object" ++ fi ++ else ++ # Only an error if not doing a dry-run. ++ if $opt_dry_run; then ++ # Extract subdirectory from the argument. ++ func_dirname "$arg" "/" "" ++ xdir="$func_dirname_result" ++ ++ func_lo2o "$arg" ++ pic_object=$xdir$objdir/$func_lo2o_result ++ non_pic_object=$xdir$func_lo2o_result ++ func_append libobjs " $pic_object" ++ func_append non_pic_objects " $non_pic_object" ++ else ++ func_fatal_error "\`$arg' is not a valid libtool object" ++ fi ++ fi ++ done ++ else ++ func_fatal_error "link input file \`$arg' does not exist" ++ fi ++ arg=$save_arg ++ prev= ++ continue ++ ;; ++ precious_regex) ++ precious_files_regex="$arg" ++ prev= ++ continue ++ ;; ++ release) ++ release="-$arg" ++ prev= ++ continue ++ ;; ++ rpath | xrpath) ++ # We need an absolute path. 
++ case $arg in ++ [\\/]* | [A-Za-z]:[\\/]*) ;; ++ *) ++ func_fatal_error "only absolute run-paths are allowed" ++ ;; ++ esac ++ if test "$prev" = rpath; then ++ case "$rpath " in ++ *" $arg "*) ;; ++ *) rpath="$rpath $arg" ;; ++ esac ++ else ++ case "$xrpath " in ++ *" $arg "*) ;; ++ *) xrpath="$xrpath $arg" ;; ++ esac ++ fi ++ prev= ++ continue ++ ;; ++ shrext) ++ shrext_cmds="$arg" ++ prev= ++ continue ++ ;; ++ weak) ++ weak_libs="$weak_libs $arg" ++ prev= ++ continue ++ ;; ++ xcclinker) ++ linker_flags="$linker_flags $qarg" ++ compiler_flags="$compiler_flags $qarg" ++ prev= ++ func_append compile_command " $qarg" ++ func_append finalize_command " $qarg" ++ continue ++ ;; ++ xcompiler) ++ compiler_flags="$compiler_flags $qarg" ++ prev= ++ func_append compile_command " $qarg" ++ func_append finalize_command " $qarg" ++ continue ++ ;; ++ xlinker) ++ linker_flags="$linker_flags $qarg" ++ compiler_flags="$compiler_flags $wl$qarg" ++ prev= ++ func_append compile_command " $wl$qarg" ++ func_append finalize_command " $wl$qarg" ++ continue ++ ;; ++ *) ++ eval "$prev=\"\$arg\"" ++ prev= ++ continue ++ ;; ++ esac ++ fi # test -n "$prev" ++ ++ prevarg="$arg" ++ ++ case $arg in ++ -all-static) ++ if test -n "$link_static_flag"; then ++ # See comment for -static flag below, for more details. ++ func_append compile_command " $link_static_flag" ++ func_append finalize_command " $link_static_flag" ++ fi ++ continue ++ ;; ++ ++ -allow-undefined) ++ # FIXME: remove this flag sometime in the future. 
++ func_fatal_error "\`-allow-undefined' must not be used because it is the default" ++ ;; ++ ++ -avoid-version) ++ avoid_version=yes ++ continue ++ ;; ++ ++ -bindir) ++ prev=bindir ++ continue ++ ;; ++ ++ -dlopen) ++ prev=dlfiles ++ continue ++ ;; ++ ++ -dlpreopen) ++ prev=dlprefiles ++ continue ++ ;; ++ ++ -export-dynamic) ++ export_dynamic=yes ++ continue ++ ;; ++ ++ -export-symbols | -export-symbols-regex) ++ if test -n "$export_symbols" || test -n "$export_symbols_regex"; then ++ func_fatal_error "more than one -exported-symbols argument is not allowed" ++ fi ++ if test "X$arg" = "X-export-symbols"; then ++ prev=expsyms ++ else ++ prev=expsyms_regex ++ fi ++ continue ++ ;; ++ ++ -framework) ++ prev=framework ++ continue ++ ;; ++ ++ -inst-prefix-dir) ++ prev=inst_prefix ++ continue ++ ;; ++ ++ # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:* ++ # so, if we see these flags be careful not to treat them like -L ++ -L[A-Z][A-Z]*:*) ++ case $with_gcc/$host in ++ no/*-*-irix* | /*-*-irix*) ++ func_append compile_command " $arg" ++ func_append finalize_command " $arg" ++ ;; ++ esac ++ continue ++ ;; ++ ++ -L*) ++ func_stripname '-L' '' "$arg" ++ dir=$func_stripname_result ++ if test -z "$dir"; then ++ if test "$#" -gt 0; then ++ func_fatal_error "require no space between \`-L' and \`$1'" ++ else ++ func_fatal_error "need path for \`-L' option" ++ fi ++ fi ++ # We need an absolute path. 
++ case $dir in ++ [\\/]* | [A-Za-z]:[\\/]*) ;; ++ *) ++ absdir=`cd "$dir" && pwd` ++ test -z "$absdir" && \ ++ func_fatal_error "cannot determine absolute directory name of \`$dir'" ++ dir="$absdir" ++ ;; ++ esac ++ case "$deplibs " in ++ *" -L$dir "*) ;; ++ *) ++ deplibs="$deplibs -L$dir" ++ lib_search_path="$lib_search_path $dir" ++ ;; ++ esac ++ case $host in ++ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) ++ testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'` ++ case :$dllsearchpath: in ++ *":$dir:"*) ;; ++ ::) dllsearchpath=$dir;; ++ *) dllsearchpath="$dllsearchpath:$dir";; ++ esac ++ case :$dllsearchpath: in ++ *":$testbindir:"*) ;; ++ ::) dllsearchpath=$testbindir;; ++ *) dllsearchpath="$dllsearchpath:$testbindir";; ++ esac ++ ;; ++ esac ++ continue ++ ;; ++ ++ -l*) ++ if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then ++ case $host in ++ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*) ++ # These systems don't actually have a C or math library (as such) ++ continue ++ ;; ++ *-*-os2*) ++ # These systems don't actually have a C library (as such) ++ test "X$arg" = "X-lc" && continue ++ ;; ++ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) ++ # Do not include libc due to us having libc/libc_r. ++ test "X$arg" = "X-lc" && continue ++ ;; ++ *-*-rhapsody* | *-*-darwin1.[012]) ++ # Rhapsody C and math libraries are in the System framework ++ deplibs="$deplibs System.ltframework" ++ continue ++ ;; ++ *-*-sco3.2v5* | *-*-sco5v6*) ++ # Causes problems with __ctype ++ test "X$arg" = "X-lc" && continue ++ ;; ++ *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) ++ # Compiler inserts libc in the correct place for threads to work ++ test "X$arg" = "X-lc" && continue ++ ;; ++ *-*-linux*) ++ test "X$arg" = "X-lc" && continue ++ ;; ++ esac ++ elif test "X$arg" = "X-lc_r"; then ++ case $host in ++ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) ++ # Do not include libc_r directly, use -pthread flag. 
++ continue ++ ;; ++ esac ++ fi ++ deplibs="$deplibs $arg" ++ continue ++ ;; ++ ++ -module) ++ module=yes ++ continue ++ ;; ++ ++ # Tru64 UNIX uses -model [arg] to determine the layout of C++ ++ # classes, name mangling, and exception handling. ++ # Darwin uses the -arch flag to determine output architecture. ++ -model|-arch|-isysroot) ++ compiler_flags="$compiler_flags $arg" ++ func_append compile_command " $arg" ++ func_append finalize_command " $arg" ++ prev=xcompiler ++ continue ++ ;; ++ ++ -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads) ++ compiler_flags="$compiler_flags $arg" ++ func_append compile_command " $arg" ++ func_append finalize_command " $arg" ++ case "$new_inherited_linker_flags " in ++ *" $arg "*) ;; ++ * ) new_inherited_linker_flags="$new_inherited_linker_flags $arg" ;; ++ esac ++ continue ++ ;; ++ ++ -multi_module) ++ single_module="${wl}-multi_module" ++ continue ++ ;; ++ ++ -no-fast-install) ++ fast_install=no ++ continue ++ ;; ++ ++ -no-install) ++ case $host in ++ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*) ++ # The PATH hackery in wrapper scripts is required on Windows ++ # and Darwin in order for the loader to find any dlls it needs. ++ func_warning "\`-no-install' is ignored for $host" ++ func_warning "assuming \`-no-fast-install' instead" ++ fast_install=no ++ ;; ++ *) no_install=yes ;; ++ esac ++ continue ++ ;; ++ ++ -no-undefined) ++ allow_undefined=no ++ continue ++ ;; ++ ++ -objectlist) ++ prev=objectlist ++ continue ++ ;; ++ ++ -o) prev=output ;; ++ ++ -precious-files-regex) ++ prev=precious_regex ++ continue ++ ;; ++ ++ -release) ++ prev=release ++ continue ++ ;; ++ ++ -rpath) ++ prev=rpath ++ continue ++ ;; ++ ++ -R) ++ prev=xrpath ++ continue ++ ;; ++ ++ -R*) ++ func_stripname '-R' '' "$arg" ++ dir=$func_stripname_result ++ # We need an absolute path. 
++ case $dir in ++ [\\/]* | [A-Za-z]:[\\/]*) ;; ++ *) ++ func_fatal_error "only absolute run-paths are allowed" ++ ;; ++ esac ++ case "$xrpath " in ++ *" $dir "*) ;; ++ *) xrpath="$xrpath $dir" ;; ++ esac ++ continue ++ ;; ++ ++ -shared) ++ # The effects of -shared are defined in a previous loop. ++ continue ++ ;; ++ ++ -shrext) ++ prev=shrext ++ continue ++ ;; ++ ++ -static | -static-libtool-libs) ++ # The effects of -static are defined in a previous loop. ++ # We used to do the same as -all-static on platforms that ++ # didn't have a PIC flag, but the assumption that the effects ++ # would be equivalent was wrong. It would break on at least ++ # Digital Unix and AIX. ++ continue ++ ;; ++ ++ -thread-safe) ++ thread_safe=yes ++ continue ++ ;; ++ ++ -version-info) ++ prev=vinfo ++ continue ++ ;; ++ ++ -version-number) ++ prev=vinfo ++ vinfo_number=yes ++ continue ++ ;; ++ ++ -weak) ++ prev=weak ++ continue ++ ;; ++ ++ -Wc,*) ++ func_stripname '-Wc,' '' "$arg" ++ args=$func_stripname_result ++ arg= ++ save_ifs="$IFS"; IFS=',' ++ for flag in $args; do ++ IFS="$save_ifs" ++ func_quote_for_eval "$flag" ++ arg="$arg $func_quote_for_eval_result" ++ compiler_flags="$compiler_flags $func_quote_for_eval_result" ++ done ++ IFS="$save_ifs" ++ func_stripname ' ' '' "$arg" ++ arg=$func_stripname_result ++ ;; ++ ++ -Wl,*) ++ func_stripname '-Wl,' '' "$arg" ++ args=$func_stripname_result ++ arg= ++ save_ifs="$IFS"; IFS=',' ++ for flag in $args; do ++ IFS="$save_ifs" ++ func_quote_for_eval "$flag" ++ arg="$arg $wl$func_quote_for_eval_result" ++ compiler_flags="$compiler_flags $wl$func_quote_for_eval_result" ++ linker_flags="$linker_flags $func_quote_for_eval_result" ++ done ++ IFS="$save_ifs" ++ func_stripname ' ' '' "$arg" ++ arg=$func_stripname_result ++ ;; ++ ++ -Xcompiler) ++ prev=xcompiler ++ continue ++ ;; ++ ++ -Xlinker) ++ prev=xlinker ++ continue ++ ;; ++ ++ -XCClinker) ++ prev=xcclinker ++ continue ++ ;; ++ ++ # -msg_* for osf cc ++ -msg_*) ++ func_quote_for_eval "$arg" 
++ arg="$func_quote_for_eval_result" ++ ;; ++ ++ # -64, -mips[0-9] enable 64-bit mode on the SGI compiler ++ # -r[0-9][0-9]* specifies the processor on the SGI compiler ++ # -xarch=*, -xtarget=* enable 64-bit mode on the Sun compiler ++ # +DA*, +DD* enable 64-bit mode on the HP compiler ++ # -q* pass through compiler args for the IBM compiler ++ # -m*, -t[45]*, -txscale* pass through architecture-specific ++ # compiler args for GCC ++ # -F/path gives path to uninstalled frameworks, gcc on darwin ++ # -p, -pg, --coverage, -fprofile-* pass through profiling flag for GCC ++ # @file GCC response files ++ # -tp=* Portland pgcc target processor selection ++ -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \ ++ -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*) ++ func_quote_for_eval "$arg" ++ arg="$func_quote_for_eval_result" ++ func_append compile_command " $arg" ++ func_append finalize_command " $arg" ++ compiler_flags="$compiler_flags $arg" ++ continue ++ ;; ++ ++ # Some other compiler flag. ++ -* | +*) ++ func_quote_for_eval "$arg" ++ arg="$func_quote_for_eval_result" ++ ;; ++ ++ *.$objext) ++ # A standard object. ++ objs="$objs $arg" ++ ;; ++ ++ *.lo) ++ # A libtool-controlled object. ++ ++ # Check to see that this really is a libtool object. ++ if func_lalib_unsafe_p "$arg"; then ++ pic_object= ++ non_pic_object= ++ ++ # Read the .lo file ++ func_source "$arg" ++ ++ if test -z "$pic_object" || ++ test -z "$non_pic_object" || ++ test "$pic_object" = none && ++ test "$non_pic_object" = none; then ++ func_fatal_error "cannot find name of object for \`$arg'" ++ fi ++ ++ # Extract subdirectory from the argument. ++ func_dirname "$arg" "/" "" ++ xdir="$func_dirname_result" ++ ++ if test "$pic_object" != none; then ++ # Prepend the subdirectory the object is found in. 
++ pic_object="$xdir$pic_object" ++ ++ if test "$prev" = dlfiles; then ++ if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then ++ dlfiles="$dlfiles $pic_object" ++ prev= ++ continue ++ else ++ # If libtool objects are unsupported, then we need to preload. ++ prev=dlprefiles ++ fi ++ fi ++ ++ # CHECK ME: I think I busted this. -Ossama ++ if test "$prev" = dlprefiles; then ++ # Preload the old-style object. ++ dlprefiles="$dlprefiles $pic_object" ++ prev= ++ fi ++ ++ # A PIC object. ++ func_append libobjs " $pic_object" ++ arg="$pic_object" ++ fi ++ ++ # Non-PIC object. ++ if test "$non_pic_object" != none; then ++ # Prepend the subdirectory the object is found in. ++ non_pic_object="$xdir$non_pic_object" ++ ++ # A standard non-PIC object ++ func_append non_pic_objects " $non_pic_object" ++ if test -z "$pic_object" || test "$pic_object" = none ; then ++ arg="$non_pic_object" ++ fi ++ else ++ # If the PIC object exists, use it instead. ++ # $xdir was prepended to $pic_object above. ++ non_pic_object="$pic_object" ++ func_append non_pic_objects " $non_pic_object" ++ fi ++ else ++ # Only an error if not doing a dry-run. ++ if $opt_dry_run; then ++ # Extract subdirectory from the argument. ++ func_dirname "$arg" "/" "" ++ xdir="$func_dirname_result" ++ ++ func_lo2o "$arg" ++ pic_object=$xdir$objdir/$func_lo2o_result ++ non_pic_object=$xdir$func_lo2o_result ++ func_append libobjs " $pic_object" ++ func_append non_pic_objects " $non_pic_object" ++ else ++ func_fatal_error "\`$arg' is not a valid libtool object" ++ fi ++ fi ++ ;; ++ ++ *.$libext) ++ # An archive. ++ deplibs="$deplibs $arg" ++ old_deplibs="$old_deplibs $arg" ++ continue ++ ;; ++ ++ *.la) ++ # A libtool-controlled library. ++ ++ if test "$prev" = dlfiles; then ++ # This library was specified with -dlopen. ++ dlfiles="$dlfiles $arg" ++ prev= ++ elif test "$prev" = dlprefiles; then ++ # The library was specified with -dlpreopen. 
++ dlprefiles="$dlprefiles $arg" ++ prev= ++ else ++ deplibs="$deplibs $arg" ++ fi ++ continue ++ ;; ++ ++ # Some other compiler argument. ++ *) ++ # Unknown arguments in both finalize_command and compile_command need ++ # to be aesthetically quoted because they are evaled later. ++ func_quote_for_eval "$arg" ++ arg="$func_quote_for_eval_result" ++ ;; ++ esac # arg ++ ++ # Now actually substitute the argument into the commands. ++ if test -n "$arg"; then ++ func_append compile_command " $arg" ++ func_append finalize_command " $arg" ++ fi ++ done # argument parsing loop ++ ++ test -n "$prev" && \ ++ func_fatal_help "the \`$prevarg' option requires an argument" ++ ++ if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then ++ eval "arg=\"$export_dynamic_flag_spec\"" ++ func_append compile_command " $arg" ++ func_append finalize_command " $arg" ++ fi ++ ++ oldlibs= ++ # calculate the name of the file, without its directory ++ func_basename "$output" ++ outputname="$func_basename_result" ++ libobjs_save="$libobjs" ++ ++ if test -n "$shlibpath_var"; then ++ # get the directories listed in $shlibpath_var ++ eval shlib_search_path=\`\$ECHO \"\${$shlibpath_var}\" \| \$SED \'s/:/ /g\'\` ++ else ++ shlib_search_path= ++ fi ++ eval "sys_lib_search_path=\"$sys_lib_search_path_spec\"" ++ eval "sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"" ++ ++ func_dirname "$output" "/" "" ++ output_objdir="$func_dirname_result$objdir" ++ # Create the object directory. ++ func_mkdir_p "$output_objdir" ++ ++ # Determine the type of output ++ case $output in ++ "") ++ func_fatal_help "you must specify an output file" ++ ;; ++ *.$libext) linkmode=oldlib ;; ++ *.lo | *.$objext) linkmode=obj ;; ++ *.la) linkmode=lib ;; ++ *) linkmode=prog ;; # Anything else should be a program. ++ esac ++ ++ specialdeplibs= ++ ++ libs= ++ # Find all interdependent deplibs by searching for libraries ++ # that are linked more than once (e.g. 
-la -lb -la) ++ for deplib in $deplibs; do ++ if $opt_duplicate_deps ; then ++ case "$libs " in ++ *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; ++ esac ++ fi ++ libs="$libs $deplib" ++ done ++ ++ if test "$linkmode" = lib; then ++ libs="$predeps $libs $compiler_lib_search_path $postdeps" ++ ++ # Compute libraries that are listed more than once in $predeps ++ # $postdeps and mark them as special (i.e., whose duplicates are ++ # not to be eliminated). ++ pre_post_deps= ++ if $opt_duplicate_compiler_generated_deps; then ++ for pre_post_dep in $predeps $postdeps; do ++ case "$pre_post_deps " in ++ *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;; ++ esac ++ pre_post_deps="$pre_post_deps $pre_post_dep" ++ done ++ fi ++ pre_post_deps= ++ fi ++ ++ deplibs= ++ newdependency_libs= ++ newlib_search_path= ++ need_relink=no # whether we're linking any uninstalled libtool libraries ++ notinst_deplibs= # not-installed libtool libraries ++ notinst_path= # paths that contain not-installed libtool libraries ++ ++ case $linkmode in ++ lib) ++ passes="conv dlpreopen link" ++ for file in $dlfiles $dlprefiles; do ++ case $file in ++ *.la) ;; ++ *) ++ func_fatal_help "libraries can \`-dlopen' only libtool libraries: $file" ++ ;; ++ esac ++ done ++ ;; ++ prog) ++ compile_deplibs= ++ finalize_deplibs= ++ alldeplibs=no ++ newdlfiles= ++ newdlprefiles= ++ passes="conv scan dlopen dlpreopen link" ++ ;; ++ *) passes="conv" ++ ;; ++ esac ++ ++ for pass in $passes; do ++ # The preopen pass in lib mode reverses $deplibs; put it back here ++ # so that -L comes before libs that need it for instance... 
++ if test "$linkmode,$pass" = "lib,link"; then ++ ## FIXME: Find the place where the list is rebuilt in the wrong ++ ## order, and fix it there properly ++ tmp_deplibs= ++ for deplib in $deplibs; do ++ tmp_deplibs="$deplib $tmp_deplibs" ++ done ++ deplibs="$tmp_deplibs" ++ fi ++ ++ if test "$linkmode,$pass" = "lib,link" || ++ test "$linkmode,$pass" = "prog,scan"; then ++ libs="$deplibs" ++ deplibs= ++ fi ++ if test "$linkmode" = prog; then ++ case $pass in ++ dlopen) libs="$dlfiles" ;; ++ dlpreopen) libs="$dlprefiles" ;; ++ link) libs="$deplibs %DEPLIBS% $dependency_libs" ;; ++ esac ++ fi ++ if test "$linkmode,$pass" = "lib,dlpreopen"; then ++ # Collect and forward deplibs of preopened libtool libs ++ for lib in $dlprefiles; do ++ # Ignore non-libtool-libs ++ dependency_libs= ++ case $lib in ++ *.la) func_source "$lib" ;; ++ esac ++ ++ # Collect preopened libtool deplibs, except any this library ++ # has declared as weak libs ++ for deplib in $dependency_libs; do ++ func_basename "$deplib" ++ deplib_base=$func_basename_result ++ case " $weak_libs " in ++ *" $deplib_base "*) ;; ++ *) deplibs="$deplibs $deplib" ;; ++ esac ++ done ++ done ++ libs="$dlprefiles" ++ fi ++ if test "$pass" = dlopen; then ++ # Collect dlpreopened libraries ++ save_deplibs="$deplibs" ++ deplibs= ++ fi ++ ++ for deplib in $libs; do ++ lib= ++ found=no ++ case $deplib in ++ -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads) ++ if test "$linkmode,$pass" = "prog,link"; then ++ compile_deplibs="$deplib $compile_deplibs" ++ finalize_deplibs="$deplib $finalize_deplibs" ++ else ++ compiler_flags="$compiler_flags $deplib" ++ if test "$linkmode" = lib ; then ++ case "$new_inherited_linker_flags " in ++ *" $deplib "*) ;; ++ * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;; ++ esac ++ fi ++ fi ++ continue ++ ;; ++ -l*) ++ if test "$linkmode" != lib && test "$linkmode" != prog; then ++ func_warning "\`-l' is ignored for archives/objects" ++ continue ++ fi ++ 
func_stripname '-l' '' "$deplib" ++ name=$func_stripname_result ++ if test "$linkmode" = lib; then ++ searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path" ++ else ++ searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path" ++ fi ++ for searchdir in $searchdirs; do ++ for search_ext in .la $std_shrext .so .a; do ++ # Search the libtool library ++ lib="$searchdir/lib${name}${search_ext}" ++ if test -f "$lib"; then ++ if test "$search_ext" = ".la"; then ++ found=yes ++ else ++ found=no ++ fi ++ break 2 ++ fi ++ done ++ done ++ if test "$found" != yes; then ++ # deplib doesn't seem to be a libtool library ++ if test "$linkmode,$pass" = "prog,link"; then ++ compile_deplibs="$deplib $compile_deplibs" ++ finalize_deplibs="$deplib $finalize_deplibs" ++ else ++ deplibs="$deplib $deplibs" ++ test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" ++ fi ++ continue ++ else # deplib is a libtool library ++ # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib, ++ # We need to do some special things here, and not later. ++ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then ++ case " $predeps $postdeps " in ++ *" $deplib "*) ++ if func_lalib_p "$lib"; then ++ library_names= ++ old_library= ++ func_source "$lib" ++ for l in $old_library $library_names; do ++ ll="$l" ++ done ++ if test "X$ll" = "X$old_library" ; then # only static version available ++ found=no ++ func_dirname "$lib" "" "." 
++ ladir="$func_dirname_result" ++ lib=$ladir/$old_library ++ if test "$linkmode,$pass" = "prog,link"; then ++ compile_deplibs="$deplib $compile_deplibs" ++ finalize_deplibs="$deplib $finalize_deplibs" ++ else ++ deplibs="$deplib $deplibs" ++ test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs" ++ fi ++ continue ++ fi ++ fi ++ ;; ++ *) ;; ++ esac ++ fi ++ fi ++ ;; # -l ++ *.ltframework) ++ if test "$linkmode,$pass" = "prog,link"; then ++ compile_deplibs="$deplib $compile_deplibs" ++ finalize_deplibs="$deplib $finalize_deplibs" ++ else ++ deplibs="$deplib $deplibs" ++ if test "$linkmode" = lib ; then ++ case "$new_inherited_linker_flags " in ++ *" $deplib "*) ;; ++ * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;; ++ esac ++ fi ++ fi ++ continue ++ ;; ++ -L*) ++ case $linkmode in ++ lib) ++ deplibs="$deplib $deplibs" ++ test "$pass" = conv && continue ++ newdependency_libs="$deplib $newdependency_libs" ++ func_stripname '-L' '' "$deplib" ++ newlib_search_path="$newlib_search_path $func_stripname_result" ++ ;; ++ prog) ++ if test "$pass" = conv; then ++ deplibs="$deplib $deplibs" ++ continue ++ fi ++ if test "$pass" = scan; then ++ deplibs="$deplib $deplibs" ++ else ++ compile_deplibs="$deplib $compile_deplibs" ++ finalize_deplibs="$deplib $finalize_deplibs" ++ fi ++ func_stripname '-L' '' "$deplib" ++ newlib_search_path="$newlib_search_path $func_stripname_result" ++ ;; ++ *) ++ func_warning "\`-L' is ignored for archives/objects" ++ ;; ++ esac # linkmode ++ continue ++ ;; # -L ++ -R*) ++ if test "$pass" = link; then ++ func_stripname '-R' '' "$deplib" ++ dir=$func_stripname_result ++ # Make sure the xrpath contains only unique directories. 
++ case "$xrpath " in ++ *" $dir "*) ;; ++ *) xrpath="$xrpath $dir" ;; ++ esac ++ fi ++ deplibs="$deplib $deplibs" ++ continue ++ ;; ++ *.la) lib="$deplib" ;; ++ *.$libext) ++ if test "$pass" = conv; then ++ deplibs="$deplib $deplibs" ++ continue ++ fi ++ case $linkmode in ++ lib) ++ # Linking convenience modules into shared libraries is allowed, ++ # but linking other static libraries is non-portable. ++ case " $dlpreconveniencelibs " in ++ *" $deplib "*) ;; ++ *) ++ valid_a_lib=no ++ case $deplibs_check_method in ++ match_pattern*) ++ set dummy $deplibs_check_method; shift ++ match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` ++ if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \ ++ | $EGREP "$match_pattern_regex" > /dev/null; then ++ valid_a_lib=yes ++ fi ++ ;; ++ pass_all) ++ valid_a_lib=yes ++ ;; ++ esac ++ if test "$valid_a_lib" != yes; then ++ echo ++ $ECHO "*** Warning: Trying to link with static lib archive $deplib." ++ echo "*** I have the capability to make that library automatically link in when" ++ echo "*** you link to this library. But I can only do this if you have a" ++ echo "*** shared version of the library, which you do not appear to have" ++ echo "*** because the file extensions .$libext of this argument makes me believe" ++ echo "*** that it is just a static archive that I should not use here." ++ else ++ echo ++ $ECHO "*** Warning: Linking the shared library $output against the" ++ $ECHO "*** static library $deplib is not portable!" 
++ deplibs="$deplib $deplibs" ++ fi ++ ;; ++ esac ++ continue ++ ;; ++ prog) ++ if test "$pass" != link; then ++ deplibs="$deplib $deplibs" ++ else ++ compile_deplibs="$deplib $compile_deplibs" ++ finalize_deplibs="$deplib $finalize_deplibs" ++ fi ++ continue ++ ;; ++ esac # linkmode ++ ;; # *.$libext ++ *.lo | *.$objext) ++ if test "$pass" = conv; then ++ deplibs="$deplib $deplibs" ++ elif test "$linkmode" = prog; then ++ if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then ++ # If there is no dlopen support or we're linking statically, ++ # we need to preload. ++ newdlprefiles="$newdlprefiles $deplib" ++ compile_deplibs="$deplib $compile_deplibs" ++ finalize_deplibs="$deplib $finalize_deplibs" ++ else ++ newdlfiles="$newdlfiles $deplib" ++ fi ++ fi ++ continue ++ ;; ++ %DEPLIBS%) ++ alldeplibs=yes ++ continue ++ ;; ++ esac # case $deplib ++ ++ if test "$found" = yes || test -f "$lib"; then : ++ else ++ func_fatal_error "cannot find the library \`$lib' or unhandled argument \`$deplib'" ++ fi ++ ++ # Check to see that this really is a libtool archive. ++ func_lalib_unsafe_p "$lib" \ ++ || func_fatal_error "\`$lib' is not a valid libtool archive" ++ ++ func_dirname "$lib" "" "." 
++ ladir="$func_dirname_result" ++ ++ dlname= ++ dlopen= ++ dlpreopen= ++ libdir= ++ library_names= ++ old_library= ++ inherited_linker_flags= ++ # If the library was installed with an old release of libtool, ++ # it will not redefine variables installed, or shouldnotlink ++ installed=yes ++ shouldnotlink=no ++ avoidtemprpath= ++ ++ ++ # Read the .la file ++ func_source "$lib" ++ ++ # Convert "-framework foo" to "foo.ltframework" ++ if test -n "$inherited_linker_flags"; then ++ tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'` ++ for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do ++ case " $new_inherited_linker_flags " in ++ *" $tmp_inherited_linker_flag "*) ;; ++ *) new_inherited_linker_flags="$new_inherited_linker_flags $tmp_inherited_linker_flag";; ++ esac ++ done ++ fi ++ dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` ++ if test "$linkmode,$pass" = "lib,link" || ++ test "$linkmode,$pass" = "prog,scan" || ++ { test "$linkmode" != prog && test "$linkmode" != lib; }; then ++ test -n "$dlopen" && dlfiles="$dlfiles $dlopen" ++ test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen" ++ fi ++ ++ if test "$pass" = conv; then ++ # Only check for convenience libraries ++ deplibs="$lib $deplibs" ++ if test -z "$libdir"; then ++ if test -z "$old_library"; then ++ func_fatal_error "cannot find name of link library for \`$lib'" ++ fi ++ # It is a libtool convenience library, so add in its objects. 
++ convenience="$convenience $ladir/$objdir/$old_library" ++ old_convenience="$old_convenience $ladir/$objdir/$old_library" ++ elif test "$linkmode" != prog && test "$linkmode" != lib; then ++ func_fatal_error "\`$lib' is not a convenience library" ++ fi ++ tmp_libs= ++ for deplib in $dependency_libs; do ++ deplibs="$deplib $deplibs" ++ if $opt_duplicate_deps ; then ++ case "$tmp_libs " in ++ *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; ++ esac ++ fi ++ tmp_libs="$tmp_libs $deplib" ++ done ++ continue ++ fi # $pass = conv ++ ++ ++ # Get the name of the library we link against. ++ linklib= ++ for l in $old_library $library_names; do ++ linklib="$l" ++ done ++ if test -z "$linklib"; then ++ func_fatal_error "cannot find name of link library for \`$lib'" ++ fi ++ ++ # This library was specified with -dlopen. ++ if test "$pass" = dlopen; then ++ if test -z "$libdir"; then ++ func_fatal_error "cannot -dlopen a convenience library: \`$lib'" ++ fi ++ if test -z "$dlname" || ++ test "$dlopen_support" != yes || ++ test "$build_libtool_libs" = no; then ++ # If there is no dlname, no dlopen support or we're linking ++ # statically, we need to preload. We also need to preload any ++ # dependent libraries so libltdl's deplib preloader doesn't ++ # bomb out in the load deplibs phase. ++ dlprefiles="$dlprefiles $lib $dependency_libs" ++ else ++ newdlfiles="$newdlfiles $lib" ++ fi ++ continue ++ fi # $pass = dlopen ++ ++ # We need an absolute path. ++ case $ladir in ++ [\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;; ++ *) ++ abs_ladir=`cd "$ladir" && pwd` ++ if test -z "$abs_ladir"; then ++ func_warning "cannot determine absolute directory name of \`$ladir'" ++ func_warning "passing it literally to the linker, although it might fail" ++ abs_ladir="$ladir" ++ fi ++ ;; ++ esac ++ func_basename "$lib" ++ laname="$func_basename_result" ++ ++ # Find the relevant object directory and library name. ++ if test "X$installed" = Xyes; then ++ if test ! 
-f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then ++ func_warning "library \`$lib' was moved." ++ dir="$ladir" ++ absdir="$abs_ladir" ++ libdir="$abs_ladir" ++ else ++ dir="$libdir" ++ absdir="$libdir" ++ fi ++ test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes ++ else ++ if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then ++ dir="$ladir" ++ absdir="$abs_ladir" ++ # Remove this search path later ++ notinst_path="$notinst_path $abs_ladir" ++ else ++ dir="$ladir/$objdir" ++ absdir="$abs_ladir/$objdir" ++ # Remove this search path later ++ notinst_path="$notinst_path $abs_ladir" ++ fi ++ fi # $installed = yes ++ func_stripname 'lib' '.la' "$laname" ++ name=$func_stripname_result ++ ++ # This library was specified with -dlpreopen. ++ if test "$pass" = dlpreopen; then ++ if test -z "$libdir" && test "$linkmode" = prog; then ++ func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'" ++ fi ++ # Prefer using a static library (so that no silly _DYNAMIC symbols ++ # are required to link). ++ if test -n "$old_library"; then ++ newdlprefiles="$newdlprefiles $dir/$old_library" ++ # Keep a list of preopened convenience libraries to check ++ # that they are being used correctly in the link pass. ++ test -z "$libdir" && \ ++ dlpreconveniencelibs="$dlpreconveniencelibs $dir/$old_library" ++ # Otherwise, use the dlname, so that lt_dlopen finds it. 
++ elif test -n "$dlname"; then ++ newdlprefiles="$newdlprefiles $dir/$dlname" ++ else ++ newdlprefiles="$newdlprefiles $dir/$linklib" ++ fi ++ fi # $pass = dlpreopen ++ ++ if test -z "$libdir"; then ++ # Link the convenience library ++ if test "$linkmode" = lib; then ++ deplibs="$dir/$old_library $deplibs" ++ elif test "$linkmode,$pass" = "prog,link"; then ++ compile_deplibs="$dir/$old_library $compile_deplibs" ++ finalize_deplibs="$dir/$old_library $finalize_deplibs" ++ else ++ deplibs="$lib $deplibs" # used for prog,scan pass ++ fi ++ continue ++ fi ++ ++ ++ if test "$linkmode" = prog && test "$pass" != link; then ++ newlib_search_path="$newlib_search_path $ladir" ++ deplibs="$lib $deplibs" ++ ++ linkalldeplibs=no ++ if test "$link_all_deplibs" != no || test -z "$library_names" || ++ test "$build_libtool_libs" = no; then ++ linkalldeplibs=yes ++ fi ++ ++ tmp_libs= ++ for deplib in $dependency_libs; do ++ case $deplib in ++ -L*) func_stripname '-L' '' "$deplib" ++ newlib_search_path="$newlib_search_path $func_stripname_result" ++ ;; ++ esac ++ # Need to link against all dependency_libs? ++ if test "$linkalldeplibs" = yes; then ++ deplibs="$deplib $deplibs" ++ else ++ # Need to hardcode shared library paths ++ # or/and link against static libraries ++ newdependency_libs="$deplib $newdependency_libs" ++ fi ++ if $opt_duplicate_deps ; then ++ case "$tmp_libs " in ++ *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; ++ esac ++ fi ++ tmp_libs="$tmp_libs $deplib" ++ done # for deplib ++ continue ++ fi # $linkmode = prog... ++ ++ if test "$linkmode,$pass" = "prog,link"; then ++ if test -n "$library_names" && ++ { { test "$prefer_static_libs" = no || ++ test "$prefer_static_libs,$installed" = "built,yes"; } || ++ test -z "$old_library"; }; then ++ # We need to hardcode the library path ++ if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then ++ # Make sure the rpath contains only unique directories. 
++ case "$temp_rpath:" in ++ *"$absdir:"*) ;; ++ *) temp_rpath="$temp_rpath$absdir:" ;; ++ esac ++ fi ++ ++ # Hardcode the library path. ++ # Skip directories that are in the system default run-time ++ # search path. ++ case " $sys_lib_dlsearch_path " in ++ *" $absdir "*) ;; ++ *) ++ case "$compile_rpath " in ++ *" $absdir "*) ;; ++ *) compile_rpath="$compile_rpath $absdir" ++ esac ++ ;; ++ esac ++ case " $sys_lib_dlsearch_path " in ++ *" $libdir "*) ;; ++ *) ++ case "$finalize_rpath " in ++ *" $libdir "*) ;; ++ *) finalize_rpath="$finalize_rpath $libdir" ++ esac ++ ;; ++ esac ++ fi # $linkmode,$pass = prog,link... ++ ++ if test "$alldeplibs" = yes && ++ { test "$deplibs_check_method" = pass_all || ++ { test "$build_libtool_libs" = yes && ++ test -n "$library_names"; }; }; then ++ # We only need to search for static libraries ++ continue ++ fi ++ fi ++ ++ link_static=no # Whether the deplib will be linked statically ++ use_static_libs=$prefer_static_libs ++ if test "$use_static_libs" = built && test "$installed" = yes; then ++ use_static_libs=no ++ fi ++ if test -n "$library_names" && ++ { test "$use_static_libs" = no || test -z "$old_library"; }; then ++ case $host in ++ *cygwin* | *mingw* | *cegcc*) ++ # No point in relinking DLLs because paths are not encoded ++ notinst_deplibs="$notinst_deplibs $lib" ++ need_relink=no ++ ;; ++ *) ++ if test "$installed" = no; then ++ notinst_deplibs="$notinst_deplibs $lib" ++ need_relink=yes ++ fi ++ ;; ++ esac ++ # This is a shared library ++ ++ # Warn about portability, can't link against -module's on some ++ # systems (darwin). Don't bleat about dlopened modules though! 
++ dlopenmodule="" ++ for dlpremoduletest in $dlprefiles; do ++ if test "X$dlpremoduletest" = "X$lib"; then ++ dlopenmodule="$dlpremoduletest" ++ break ++ fi ++ done ++ if test -z "$dlopenmodule" && test "$shouldnotlink" = yes && test "$pass" = link; then ++ echo ++ if test "$linkmode" = prog; then ++ $ECHO "*** Warning: Linking the executable $output against the loadable module" ++ else ++ $ECHO "*** Warning: Linking the shared library $output against the loadable module" ++ fi ++ $ECHO "*** $linklib is not portable!" ++ fi ++ if test "$linkmode" = lib && ++ test "$hardcode_into_libs" = yes; then ++ # Hardcode the library path. ++ # Skip directories that are in the system default run-time ++ # search path. ++ case " $sys_lib_dlsearch_path " in ++ *" $absdir "*) ;; ++ *) ++ case "$compile_rpath " in ++ *" $absdir "*) ;; ++ *) compile_rpath="$compile_rpath $absdir" ++ esac ++ ;; ++ esac ++ case " $sys_lib_dlsearch_path " in ++ *" $libdir "*) ;; ++ *) ++ case "$finalize_rpath " in ++ *" $libdir "*) ;; ++ *) finalize_rpath="$finalize_rpath $libdir" ++ esac ++ ;; ++ esac ++ fi ++ ++ if test -n "$old_archive_from_expsyms_cmds"; then ++ # figure out the soname ++ set dummy $library_names ++ shift ++ realname="$1" ++ shift ++ eval "libname=\"$libname_spec\"" ++ # use dlname if we got it. it's perfectly good, no? 
++ if test -n "$dlname"; then ++ soname="$dlname" ++ elif test -n "$soname_spec"; then ++ # bleh windows ++ case $host in ++ *cygwin* | mingw* | *cegcc*) ++ func_arith $current - $age ++ major=$func_arith_result ++ versuffix="-$major" ++ ;; ++ esac ++ eval "soname=\"$soname_spec\"" ++ else ++ soname="$realname" ++ fi ++ ++ # Make a new name for the extract_expsyms_cmds to use ++ soroot="$soname" ++ func_basename "$soroot" ++ soname="$func_basename_result" ++ func_stripname 'lib' '.dll' "$soname" ++ newlib=libimp-$func_stripname_result.a ++ ++ # If the library has no export list, then create one now ++ if test -f "$output_objdir/$soname-def"; then : ++ else ++ func_verbose "extracting exported symbol list from \`$soname'" ++ func_execute_cmds "$extract_expsyms_cmds" 'exit $?' ++ fi ++ ++ # Create $newlib ++ if test -f "$output_objdir/$newlib"; then :; else ++ func_verbose "generating import library for \`$soname'" ++ func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?' ++ fi ++ # make sure the library variables are pointing to the new library ++ dir=$output_objdir ++ linklib=$newlib ++ fi # test -n "$old_archive_from_expsyms_cmds" ++ ++ if test "$linkmode" = prog || test "$mode" != relink; then ++ add_shlibpath= ++ add_dir= ++ add= ++ lib_linked=yes ++ case $hardcode_action in ++ immediate | unsupported) ++ if test "$hardcode_direct" = no; then ++ add="$dir/$linklib" ++ case $host in ++ *-*-sco3.2v5.0.[024]*) add_dir="-L$dir" ;; ++ *-*-sysv4*uw2*) add_dir="-L$dir" ;; ++ *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \ ++ *-*-unixware7*) add_dir="-L$dir" ;; ++ *-*-darwin* ) ++ # if the lib is a (non-dlopened) module then we can not ++ # link against it, someone is ignoring the earlier warnings ++ if /usr/bin/file -L $add 2> /dev/null | ++ $GREP ": [^:]* bundle" >/dev/null ; then ++ if test "X$dlopenmodule" != "X$lib"; then ++ $ECHO "*** Warning: lib $linklib is a module, not a shared library" ++ if test -z "$old_library" ; then ++ echo ++ echo "*** And 
there doesn't seem to be a static archive available" ++ echo "*** The link will probably fail, sorry" ++ else ++ add="$dir/$old_library" ++ fi ++ elif test -n "$old_library"; then ++ add="$dir/$old_library" ++ fi ++ fi ++ esac ++ elif test "$hardcode_minus_L" = no; then ++ case $host in ++ *-*-sunos*) add_shlibpath="$dir" ;; ++ esac ++ add_dir="-L$dir" ++ add="-l$name" ++ elif test "$hardcode_shlibpath_var" = no; then ++ add_shlibpath="$dir" ++ add="-l$name" ++ else ++ lib_linked=no ++ fi ++ ;; ++ relink) ++ if test "$hardcode_direct" = yes && ++ test "$hardcode_direct_absolute" = no; then ++ add="$dir/$linklib" ++ elif test "$hardcode_minus_L" = yes; then ++ add_dir="-L$absdir" ++ # Try looking first in the location we're being installed to. ++ if test -n "$inst_prefix_dir"; then ++ case $libdir in ++ [\\/]*) ++ add_dir="$add_dir -L$inst_prefix_dir$libdir" ++ ;; ++ esac ++ fi ++ add="-l$name" ++ elif test "$hardcode_shlibpath_var" = yes; then ++ add_shlibpath="$dir" ++ add="-l$name" ++ else ++ lib_linked=no ++ fi ++ ;; ++ *) lib_linked=no ;; ++ esac ++ ++ if test "$lib_linked" != yes; then ++ func_fatal_configuration "unsupported hardcode properties" ++ fi ++ ++ if test -n "$add_shlibpath"; then ++ case :$compile_shlibpath: in ++ *":$add_shlibpath:"*) ;; ++ *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;; ++ esac ++ fi ++ if test "$linkmode" = prog; then ++ test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs" ++ test -n "$add" && compile_deplibs="$add $compile_deplibs" ++ else ++ test -n "$add_dir" && deplibs="$add_dir $deplibs" ++ test -n "$add" && deplibs="$add $deplibs" ++ if test "$hardcode_direct" != yes && ++ test "$hardcode_minus_L" != yes && ++ test "$hardcode_shlibpath_var" = yes; then ++ case :$finalize_shlibpath: in ++ *":$libdir:"*) ;; ++ *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; ++ esac ++ fi ++ fi ++ fi ++ ++ if test "$linkmode" = prog || test "$mode" = relink; then ++ add_shlibpath= ++ add_dir= ++ add= ++ # 
Finalize command for both is simple: just hardcode it. ++ if test "$hardcode_direct" = yes && ++ test "$hardcode_direct_absolute" = no; then ++ add="$libdir/$linklib" ++ elif test "$hardcode_minus_L" = yes; then ++ add_dir="-L$libdir" ++ add="-l$name" ++ elif test "$hardcode_shlibpath_var" = yes; then ++ case :$finalize_shlibpath: in ++ *":$libdir:"*) ;; ++ *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; ++ esac ++ add="-l$name" ++ elif test "$hardcode_automatic" = yes; then ++ if test -n "$inst_prefix_dir" && ++ test -f "$inst_prefix_dir$libdir/$linklib" ; then ++ add="$inst_prefix_dir$libdir/$linklib" ++ else ++ add="$libdir/$linklib" ++ fi ++ else ++ # We cannot seem to hardcode it, guess we'll fake it. ++ add_dir="-L$libdir" ++ # Try looking first in the location we're being installed to. ++ if test -n "$inst_prefix_dir"; then ++ case $libdir in ++ [\\/]*) ++ add_dir="$add_dir -L$inst_prefix_dir$libdir" ++ ;; ++ esac ++ fi ++ add="-l$name" ++ fi ++ ++ if test "$linkmode" = prog; then ++ test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs" ++ test -n "$add" && finalize_deplibs="$add $finalize_deplibs" ++ else ++ test -n "$add_dir" && deplibs="$add_dir $deplibs" ++ test -n "$add" && deplibs="$add $deplibs" ++ fi ++ fi ++ elif test "$linkmode" = prog; then ++ # Here we assume that one of hardcode_direct or hardcode_minus_L ++ # is not unsupported. This is valid on all known static and ++ # shared platforms. 
++ if test "$hardcode_direct" != unsupported; then ++ test -n "$old_library" && linklib="$old_library" ++ compile_deplibs="$dir/$linklib $compile_deplibs" ++ finalize_deplibs="$dir/$linklib $finalize_deplibs" ++ else ++ compile_deplibs="-l$name -L$dir $compile_deplibs" ++ finalize_deplibs="-l$name -L$dir $finalize_deplibs" ++ fi ++ elif test "$build_libtool_libs" = yes; then ++ # Not a shared library ++ if test "$deplibs_check_method" != pass_all; then ++ # We're trying link a shared library against a static one ++ # but the system doesn't support it. ++ ++ # Just print a warning and add the library to dependency_libs so ++ # that the program can be linked against the static library. ++ echo ++ $ECHO "*** Warning: This system can not link to static lib archive $lib." ++ echo "*** I have the capability to make that library automatically link in when" ++ echo "*** you link to this library. But I can only do this if you have a" ++ echo "*** shared version of the library, which you do not appear to have." ++ if test "$module" = yes; then ++ echo "*** But as you try to build a module library, libtool will still create " ++ echo "*** a static module, that should work as long as the dlopening application" ++ echo "*** is linked with the -dlopen flag to resolve symbols at runtime." ++ if test -z "$global_symbol_pipe"; then ++ echo ++ echo "*** However, this would only work if libtool was able to extract symbol" ++ echo "*** lists from a program, using \`nm' or equivalent, but libtool could" ++ echo "*** not find such a program. So, this module is probably useless." ++ echo "*** \`nm' from GNU binutils and a full rebuild may help." ++ fi ++ if test "$build_old_libs" = no; then ++ build_libtool_libs=module ++ build_old_libs=yes ++ else ++ build_libtool_libs=no ++ fi ++ fi ++ else ++ deplibs="$dir/$old_library $deplibs" ++ link_static=yes ++ fi ++ fi # link shared/static library? 
++ ++ if test "$linkmode" = lib; then ++ if test -n "$dependency_libs" && ++ { test "$hardcode_into_libs" != yes || ++ test "$build_old_libs" = yes || ++ test "$link_static" = yes; }; then ++ # Extract -R from dependency_libs ++ temp_deplibs= ++ for libdir in $dependency_libs; do ++ case $libdir in ++ -R*) func_stripname '-R' '' "$libdir" ++ temp_xrpath=$func_stripname_result ++ case " $xrpath " in ++ *" $temp_xrpath "*) ;; ++ *) xrpath="$xrpath $temp_xrpath";; ++ esac;; ++ *) temp_deplibs="$temp_deplibs $libdir";; ++ esac ++ done ++ dependency_libs="$temp_deplibs" ++ fi ++ ++ newlib_search_path="$newlib_search_path $absdir" ++ # Link against this library ++ test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs" ++ # ... and its dependency_libs ++ tmp_libs= ++ for deplib in $dependency_libs; do ++ newdependency_libs="$deplib $newdependency_libs" ++ if $opt_duplicate_deps ; then ++ case "$tmp_libs " in ++ *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; ++ esac ++ fi ++ tmp_libs="$tmp_libs $deplib" ++ done ++ ++ if test "$link_all_deplibs" != no; then ++ # Add the search paths of all dependency libraries ++ for deplib in $dependency_libs; do ++ path= ++ case $deplib in ++ -L*) path="$deplib" ;; ++ *.la) ++ func_dirname "$deplib" "" "." ++ dir="$func_dirname_result" ++ # We need an absolute path. 
++ case $dir in ++ [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;; ++ *) ++ absdir=`cd "$dir" && pwd` ++ if test -z "$absdir"; then ++ func_warning "cannot determine absolute directory name of \`$dir'" ++ absdir="$dir" ++ fi ++ ;; ++ esac ++ if $GREP "^installed=no" $deplib > /dev/null; then ++ case $host in ++ *-*-darwin*) ++ depdepl= ++ deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` ++ if test -n "$deplibrary_names" ; then ++ for tmp in $deplibrary_names ; do ++ depdepl=$tmp ++ done ++ if test -f "$absdir/$objdir/$depdepl" ; then ++ depdepl="$absdir/$objdir/$depdepl" ++ darwin_install_name=`${OTOOL} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` ++ if test -z "$darwin_install_name"; then ++ darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` ++ fi ++ compiler_flags="$compiler_flags ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}" ++ linker_flags="$linker_flags -dylib_file ${darwin_install_name}:${depdepl}" ++ path= ++ fi ++ fi ++ ;; ++ *) ++ path="-L$absdir/$objdir" ++ ;; ++ esac ++ else ++ libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` ++ test -z "$libdir" && \ ++ func_fatal_error "\`$deplib' is not a valid libtool archive" ++ test "$absdir" != "$libdir" && \ ++ func_warning "\`$deplib' seems to be moved" ++ ++ path="-L$absdir" ++ fi ++ ;; ++ esac ++ case " $deplibs " in ++ *" $path "*) ;; ++ *) deplibs="$path $deplibs" ;; ++ esac ++ done ++ fi # link_all_deplibs != no ++ fi # linkmode = lib ++ done # for deplib in $libs ++ if test "$pass" = link; then ++ if test "$linkmode" = "prog"; then ++ compile_deplibs="$new_inherited_linker_flags $compile_deplibs" ++ finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs" ++ else ++ compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` ++ fi ++ fi ++ dependency_libs="$newdependency_libs" ++ if test "$pass" = dlpreopen; then ++ # Link the dlpreopened libraries before other 
libraries ++ for deplib in $save_deplibs; do ++ deplibs="$deplib $deplibs" ++ done ++ fi ++ if test "$pass" != dlopen; then ++ if test "$pass" != conv; then ++ # Make sure lib_search_path contains only unique directories. ++ lib_search_path= ++ for dir in $newlib_search_path; do ++ case "$lib_search_path " in ++ *" $dir "*) ;; ++ *) lib_search_path="$lib_search_path $dir" ;; ++ esac ++ done ++ newlib_search_path= ++ fi ++ ++ if test "$linkmode,$pass" != "prog,link"; then ++ vars="deplibs" ++ else ++ vars="compile_deplibs finalize_deplibs" ++ fi ++ for var in $vars dependency_libs; do ++ # Add libraries to $var in reverse order ++ eval tmp_libs=\$$var ++ new_libs= ++ for deplib in $tmp_libs; do ++ # FIXME: Pedantically, this is the right thing to do, so ++ # that some nasty dependency loop isn't accidentally ++ # broken: ++ #new_libs="$deplib $new_libs" ++ # Pragmatically, this seems to cause very few problems in ++ # practice: ++ case $deplib in ++ -L*) new_libs="$deplib $new_libs" ;; ++ -R*) ;; ++ *) ++ # And here is the reason: when a library appears more ++ # than once as an explicit dependence of a library, or ++ # is implicitly linked in more than once by the ++ # compiler, it is considered special, and multiple ++ # occurrences thereof are not removed. Compare this ++ # with having the same library being listed as a ++ # dependency of multiple other libraries: in this case, ++ # we know (pedantically, we assume) the library does not ++ # need to be listed more than once, so we keep only the ++ # last copy. This is not always right, but it is rare ++ # enough that we require users that really mean to play ++ # such unportable linking tricks to link the library ++ # using -Wl,-lname, so that libtool does not consider it ++ # for duplicate removal. 
++ case " $specialdeplibs " in ++ *" $deplib "*) new_libs="$deplib $new_libs" ;; ++ *) ++ case " $new_libs " in ++ *" $deplib "*) ;; ++ *) new_libs="$deplib $new_libs" ;; ++ esac ++ ;; ++ esac ++ ;; ++ esac ++ done ++ tmp_libs= ++ for deplib in $new_libs; do ++ case $deplib in ++ -L*) ++ case " $tmp_libs " in ++ *" $deplib "*) ;; ++ *) tmp_libs="$tmp_libs $deplib" ;; ++ esac ++ ;; ++ *) tmp_libs="$tmp_libs $deplib" ;; ++ esac ++ done ++ eval $var=\$tmp_libs ++ done # for var ++ fi ++ # Last step: remove runtime libs from dependency_libs ++ # (they stay in deplibs) ++ tmp_libs= ++ for i in $dependency_libs ; do ++ case " $predeps $postdeps $compiler_lib_search_path " in ++ *" $i "*) ++ i="" ++ ;; ++ esac ++ if test -n "$i" ; then ++ tmp_libs="$tmp_libs $i" ++ fi ++ done ++ dependency_libs=$tmp_libs ++ done # for pass ++ if test "$linkmode" = prog; then ++ dlfiles="$newdlfiles" ++ fi ++ if test "$linkmode" = prog || test "$linkmode" = lib; then ++ dlprefiles="$newdlprefiles" ++ fi ++ ++ case $linkmode in ++ oldlib) ++ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then ++ func_warning "\`-dlopen' is ignored for archives" ++ fi ++ ++ case " $deplibs" in ++ *\ -l* | *\ -L*) ++ func_warning "\`-l' and \`-L' are ignored for archives" ;; ++ esac ++ ++ test -n "$rpath" && \ ++ func_warning "\`-rpath' is ignored for archives" ++ ++ test -n "$xrpath" && \ ++ func_warning "\`-R' is ignored for archives" ++ ++ test -n "$vinfo" && \ ++ func_warning "\`-version-info/-version-number' is ignored for archives" ++ ++ test -n "$release" && \ ++ func_warning "\`-release' is ignored for archives" ++ ++ test -n "$export_symbols$export_symbols_regex" && \ ++ func_warning "\`-export-symbols' is ignored for archives" ++ ++ # Now set the variables for building old libraries. ++ build_libtool_libs=no ++ oldlibs="$output" ++ objs="$objs$old_deplibs" ++ ;; ++ ++ lib) ++ # Make sure we only generate libraries of the form `libNAME.la'. 
++ case $outputname in ++ lib*) ++ func_stripname 'lib' '.la' "$outputname" ++ name=$func_stripname_result ++ eval "shared_ext=\"$shrext_cmds\"" ++ eval "libname=\"$libname_spec\"" ++ ;; ++ *) ++ test "$module" = no && \ ++ func_fatal_help "libtool library \`$output' must begin with \`lib'" ++ ++ if test "$need_lib_prefix" != no; then ++ # Add the "lib" prefix for modules if required ++ func_stripname '' '.la' "$outputname" ++ name=$func_stripname_result ++ eval "shared_ext=\"$shrext_cmds\"" ++ eval "libname=\"$libname_spec\"" ++ else ++ func_stripname '' '.la' "$outputname" ++ libname=$func_stripname_result ++ fi ++ ;; ++ esac ++ ++ if test -n "$objs"; then ++ if test "$deplibs_check_method" != pass_all; then ++ func_fatal_error "cannot build libtool library \`$output' from non-libtool objects on this host:$objs" ++ else ++ echo ++ $ECHO "*** Warning: Linking the shared library $output against the non-libtool" ++ $ECHO "*** objects $objs is not portable!" ++ libobjs="$libobjs $objs" ++ fi ++ fi ++ ++ test "$dlself" != no && \ ++ func_warning "\`-dlopen self' is ignored for libtool libraries" ++ ++ set dummy $rpath ++ shift ++ test "$#" -gt 1 && \ ++ func_warning "ignoring multiple \`-rpath's for a libtool library" ++ ++ install_libdir="$1" ++ ++ oldlibs= ++ if test -z "$rpath"; then ++ if test "$build_libtool_libs" = yes; then ++ # Building a libtool convenience library. ++ # Some compilers have problems with a `.al' extension so ++ # convenience libraries should have the same extension an ++ # archive normally would. ++ oldlibs="$output_objdir/$libname.$libext $oldlibs" ++ build_libtool_libs=convenience ++ build_old_libs=yes ++ fi ++ ++ test -n "$vinfo" && \ ++ func_warning "\`-version-info/-version-number' is ignored for convenience libraries" ++ ++ test -n "$release" && \ ++ func_warning "\`-release' is ignored for convenience libraries" ++ else ++ ++ # Parse the version information argument. 
++ save_ifs="$IFS"; IFS=':' ++ set dummy $vinfo 0 0 0 ++ shift ++ IFS="$save_ifs" ++ ++ test -n "$7" && \ ++ func_fatal_help "too many parameters to \`-version-info'" ++ ++ # convert absolute version numbers to libtool ages ++ # this retains compatibility with .la files and attempts ++ # to make the code below a bit more comprehensible ++ ++ case $vinfo_number in ++ yes) ++ number_major="$1" ++ number_minor="$2" ++ number_revision="$3" ++ # ++ # There are really only two kinds -- those that ++ # use the current revision as the major version ++ # and those that subtract age and use age as ++ # a minor version. But, then there is irix ++ # which has an extra 1 added just for fun ++ # ++ case $version_type in ++ darwin|linux|osf|windows|none) ++ func_arith $number_major + $number_minor ++ current=$func_arith_result ++ age="$number_minor" ++ revision="$number_revision" ++ ;; ++ freebsd-aout|freebsd-elf|qnx|sunos) ++ current="$number_major" ++ revision="$number_minor" ++ age="0" ++ ;; ++ irix|nonstopux) ++ func_arith $number_major + $number_minor ++ current=$func_arith_result ++ age="$number_minor" ++ revision="$number_minor" ++ lt_irix_increment=no ++ ;; ++ esac ++ ;; ++ no) ++ current="$1" ++ revision="$2" ++ age="$3" ++ ;; ++ esac ++ ++ # Check that each of the things are valid numbers. 
++ case $current in ++ 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; ++ *) ++ func_error "CURRENT \`$current' must be a nonnegative integer" ++ func_fatal_error "\`$vinfo' is not valid version information" ++ ;; ++ esac ++ ++ case $revision in ++ 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; ++ *) ++ func_error "REVISION \`$revision' must be a nonnegative integer" ++ func_fatal_error "\`$vinfo' is not valid version information" ++ ;; ++ esac ++ ++ case $age in ++ 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; ++ *) ++ func_error "AGE \`$age' must be a nonnegative integer" ++ func_fatal_error "\`$vinfo' is not valid version information" ++ ;; ++ esac ++ ++ if test "$age" -gt "$current"; then ++ func_error "AGE \`$age' is greater than the current interface number \`$current'" ++ func_fatal_error "\`$vinfo' is not valid version information" ++ fi ++ ++ # Calculate the version variables. ++ major= ++ versuffix= ++ verstring= ++ case $version_type in ++ none) ;; ++ ++ darwin) ++ # Like Linux, but with the current version available in ++ # verstring for coding it into the library header ++ func_arith $current - $age ++ major=.$func_arith_result ++ versuffix="$major.$age.$revision" ++ # Darwin ld doesn't like 0 for these options... 
++ func_arith $current + 1 ++ minor_current=$func_arith_result ++ xlcverstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision" ++ verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" ++ ;; ++ ++ freebsd-aout) ++ major=".$current" ++ versuffix=".$current.$revision"; ++ ;; ++ ++ freebsd-elf) ++ major=".$current" ++ versuffix=".$current" ++ ;; ++ ++ irix | nonstopux) ++ if test "X$lt_irix_increment" = "Xno"; then ++ func_arith $current - $age ++ else ++ func_arith $current - $age + 1 ++ fi ++ major=$func_arith_result ++ ++ case $version_type in ++ nonstopux) verstring_prefix=nonstopux ;; ++ *) verstring_prefix=sgi ;; ++ esac ++ verstring="$verstring_prefix$major.$revision" ++ ++ # Add in all the interfaces that we are compatible with. ++ loop=$revision ++ while test "$loop" -ne 0; do ++ func_arith $revision - $loop ++ iface=$func_arith_result ++ func_arith $loop - 1 ++ loop=$func_arith_result ++ verstring="$verstring_prefix$major.$iface:$verstring" ++ done ++ ++ # Before this point, $major must not contain `.'. ++ major=.$major ++ versuffix="$major.$revision" ++ ;; ++ ++ linux) ++ func_arith $current - $age ++ major=.$func_arith_result ++ versuffix="$major.$age.$revision" ++ ;; ++ ++ osf) ++ func_arith $current - $age ++ major=.$func_arith_result ++ versuffix=".$current.$age.$revision" ++ verstring="$current.$age.$revision" ++ ++ # Add in all the interfaces that we are compatible with. ++ loop=$age ++ while test "$loop" -ne 0; do ++ func_arith $current - $loop ++ iface=$func_arith_result ++ func_arith $loop - 1 ++ loop=$func_arith_result ++ verstring="$verstring:${iface}.0" ++ done ++ ++ # Make executables depend on our current version. 
++ verstring="$verstring:${current}.0" ++ ;; ++ ++ qnx) ++ major=".$current" ++ versuffix=".$current" ++ ;; ++ ++ sunos) ++ major=".$current" ++ versuffix=".$current.$revision" ++ ;; ++ ++ windows) ++ # Use '-' rather than '.', since we only want one ++ # extension on DOS 8.3 filesystems. ++ func_arith $current - $age ++ major=$func_arith_result ++ versuffix="-$major" ++ ;; ++ ++ *) ++ func_fatal_configuration "unknown library version type \`$version_type'" ++ ;; ++ esac ++ ++ # Clear the version info if we defaulted, and they specified a release. ++ if test -z "$vinfo" && test -n "$release"; then ++ major= ++ case $version_type in ++ darwin) ++ # we can't check for "0.0" in archive_cmds due to quoting ++ # problems, so we reset it completely ++ verstring= ++ ;; ++ *) ++ verstring="0.0" ++ ;; ++ esac ++ if test "$need_version" = no; then ++ versuffix= ++ else ++ versuffix=".0.0" ++ fi ++ fi ++ ++ # Remove version info from name if versioning should be avoided ++ if test "$avoid_version" = yes && test "$need_version" = no; then ++ major= ++ versuffix= ++ verstring="" ++ fi ++ ++ # Check to see if the archive will have undefined symbols. ++ if test "$allow_undefined" = yes; then ++ if test "$allow_undefined_flag" = unsupported; then ++ func_warning "undefined symbols not allowed in $host shared libraries" ++ build_libtool_libs=no ++ build_old_libs=yes ++ fi ++ else ++ # Don't allow undefined symbols. ++ allow_undefined_flag="$no_undefined_flag" ++ fi ++ ++ fi ++ ++ func_generate_dlsyms "$libname" "$libname" "yes" ++ libobjs="$libobjs $symfileobj" ++ test "X$libobjs" = "X " && libobjs= ++ ++ if test "$mode" != relink; then ++ # Remove our outputs, but don't remove object files since they ++ # may have been created when compiling PIC objects. 
++ removelist= ++ tempremovelist=`$ECHO "$output_objdir/*"` ++ for p in $tempremovelist; do ++ case $p in ++ *.$objext | *.gcno) ++ ;; ++ $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*) ++ if test "X$precious_files_regex" != "X"; then ++ if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1 ++ then ++ continue ++ fi ++ fi ++ removelist="$removelist $p" ++ ;; ++ *) ;; ++ esac ++ done ++ test -n "$removelist" && \ ++ func_show_eval "${RM}r \$removelist" ++ fi ++ ++ # Now set the variables for building old libraries. ++ if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then ++ oldlibs="$oldlibs $output_objdir/$libname.$libext" ++ ++ # Transform .lo files to .o files. ++ oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP` ++ fi ++ ++ # Eliminate all temporary directories. ++ #for path in $notinst_path; do ++ # lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"` ++ # deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"` ++ # dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"` ++ #done ++ ++ if test -n "$xrpath"; then ++ # If the user specified any rpath flags, then add them. 
++ temp_xrpath= ++ for libdir in $xrpath; do ++ temp_xrpath="$temp_xrpath -R$libdir" ++ case "$finalize_rpath " in ++ *" $libdir "*) ;; ++ *) finalize_rpath="$finalize_rpath $libdir" ;; ++ esac ++ done ++ if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then ++ dependency_libs="$temp_xrpath $dependency_libs" ++ fi ++ fi ++ ++ # Make sure dlfiles contains only unique files that won't be dlpreopened ++ old_dlfiles="$dlfiles" ++ dlfiles= ++ for lib in $old_dlfiles; do ++ case " $dlprefiles $dlfiles " in ++ *" $lib "*) ;; ++ *) dlfiles="$dlfiles $lib" ;; ++ esac ++ done ++ ++ # Make sure dlprefiles contains only unique files ++ old_dlprefiles="$dlprefiles" ++ dlprefiles= ++ for lib in $old_dlprefiles; do ++ case "$dlprefiles " in ++ *" $lib "*) ;; ++ *) dlprefiles="$dlprefiles $lib" ;; ++ esac ++ done ++ ++ if test "$build_libtool_libs" = yes; then ++ if test -n "$rpath"; then ++ case $host in ++ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*) ++ # these systems don't actually have a c library (as such)! ++ ;; ++ *-*-rhapsody* | *-*-darwin1.[012]) ++ # Rhapsody C library is in the System framework ++ deplibs="$deplibs System.ltframework" ++ ;; ++ *-*-netbsd*) ++ # Don't link with libc until the a.out ld.so is fixed. ++ ;; ++ *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) ++ # Do not include libc due to us having libc/libc_r. ++ ;; ++ *-*-sco3.2v5* | *-*-sco5v6*) ++ # Causes problems with __ctype ++ ;; ++ *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) ++ # Compiler inserts libc in the correct place for threads to work ++ ;; ++ *) ++ # Add libc to deplibs on all other systems if necessary. ++ if test "$build_libtool_need_lc" = "yes"; then ++ deplibs="$deplibs -lc" ++ fi ++ ;; ++ esac ++ fi ++ ++ # Transform deplibs into only deplibs that can be linked in shared. 
++ name_save=$name ++ libname_save=$libname ++ release_save=$release ++ versuffix_save=$versuffix ++ major_save=$major ++ # I'm not sure if I'm treating the release correctly. I think ++ # release should show up in the -l (ie -lgmp5) so we don't want to ++ # add it in twice. Is that correct? ++ release="" ++ versuffix="" ++ major="" ++ newdeplibs= ++ droppeddeps=no ++ case $deplibs_check_method in ++ pass_all) ++ # Don't check for shared/static. Everything works. ++ # This might be a little naive. We might want to check ++ # whether the library exists or not. But this is on ++ # osf3 & osf4 and I'm not really sure... Just ++ # implementing what was already the behavior. ++ newdeplibs=$deplibs ++ ;; ++ test_compile) ++ # This code stresses the "libraries are programs" paradigm to its ++ # limits. Maybe even breaks it. We compile a program, linking it ++ # against the deplibs as a proxy for the library. Then we can check ++ # whether they linked in statically or dynamically with ldd. ++ $opt_dry_run || $RM conftest.c ++ cat > conftest.c </dev/null` ++ for potent_lib in $potential_libs; do ++ # Follow soft links. ++ if ls -lLd "$potent_lib" 2>/dev/null | ++ $GREP " -> " >/dev/null; then ++ continue ++ fi ++ # The statement above tries to avoid entering an ++ # endless loop below, in case of cyclic links. ++ # We might still enter an endless loop, since a link ++ # loop can be closed while we follow links, ++ # but so what? 
++ potlib="$potent_lib" ++ while test -h "$potlib" 2>/dev/null; do ++ potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'` ++ case $potliblink in ++ [\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";; ++ *) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";; ++ esac ++ done ++ if eval "$file_magic_cmd \"\$potlib\"" 2>/dev/null | ++ $SED -e 10q | ++ $EGREP "$file_magic_regex" > /dev/null; then ++ newdeplibs="$newdeplibs $a_deplib" ++ a_deplib="" ++ break 2 ++ fi ++ done ++ done ++ fi ++ if test -n "$a_deplib" ; then ++ droppeddeps=yes ++ echo ++ $ECHO "*** Warning: linker path does not have real file for library $a_deplib." ++ echo "*** I have the capability to make that library automatically link in when" ++ echo "*** you link to this library. But I can only do this if you have a" ++ echo "*** shared version of the library, which you do not appear to have" ++ echo "*** because I did check the linker path looking for a file starting" ++ if test -z "$potlib" ; then ++ $ECHO "*** with $libname but no candidates were found. (...for file magic test)" ++ else ++ $ECHO "*** with $libname and none of the candidates passed a file format test" ++ $ECHO "*** using a file magic. Last file checked: $potlib" ++ fi ++ fi ++ ;; ++ *) ++ # Add a -L argument. ++ newdeplibs="$newdeplibs $a_deplib" ++ ;; ++ esac ++ done # Gone through all deplibs. 
++ ;; ++ match_pattern*) ++ set dummy $deplibs_check_method; shift ++ match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` ++ for a_deplib in $deplibs; do ++ case $a_deplib in ++ -l*) ++ func_stripname -l '' "$a_deplib" ++ name=$func_stripname_result ++ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then ++ case " $predeps $postdeps " in ++ *" $a_deplib "*) ++ newdeplibs="$newdeplibs $a_deplib" ++ a_deplib="" ++ ;; ++ esac ++ fi ++ if test -n "$a_deplib" ; then ++ eval "libname=\"$libname_spec\"" ++ for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do ++ potential_libs=`ls $i/$libname[.-]* 2>/dev/null` ++ for potent_lib in $potential_libs; do ++ potlib="$potent_lib" # see symlink-check above in file_magic test ++ if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \ ++ $EGREP "$match_pattern_regex" > /dev/null; then ++ newdeplibs="$newdeplibs $a_deplib" ++ a_deplib="" ++ break 2 ++ fi ++ done ++ done ++ fi ++ if test -n "$a_deplib" ; then ++ droppeddeps=yes ++ echo ++ $ECHO "*** Warning: linker path does not have real file for library $a_deplib." ++ echo "*** I have the capability to make that library automatically link in when" ++ echo "*** you link to this library. But I can only do this if you have a" ++ echo "*** shared version of the library, which you do not appear to have" ++ echo "*** because I did check the linker path looking for a file starting" ++ if test -z "$potlib" ; then ++ $ECHO "*** with $libname but no candidates were found. (...for regex pattern test)" ++ else ++ $ECHO "*** with $libname and none of the candidates passed a file format test" ++ $ECHO "*** using a regex pattern. Last file checked: $potlib" ++ fi ++ fi ++ ;; ++ *) ++ # Add a -L argument. ++ newdeplibs="$newdeplibs $a_deplib" ++ ;; ++ esac ++ done # Gone through all deplibs. 
++ ;; ++ none | unknown | *) ++ newdeplibs="" ++ tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'` ++ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then ++ for i in $predeps $postdeps ; do ++ # can't use Xsed below, because $i might contain '/' ++ tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s,$i,,"` ++ done ++ fi ++ case $tmp_deplibs in ++ *[!\ \ ]*) ++ echo ++ if test "X$deplibs_check_method" = "Xnone"; then ++ echo "*** Warning: inter-library dependencies are not supported in this platform." ++ else ++ echo "*** Warning: inter-library dependencies are not known to be supported." ++ fi ++ echo "*** All declared inter-library dependencies are being dropped." ++ droppeddeps=yes ++ ;; ++ esac ++ ;; ++ esac ++ versuffix=$versuffix_save ++ major=$major_save ++ release=$release_save ++ libname=$libname_save ++ name=$name_save ++ ++ case $host in ++ *-*-rhapsody* | *-*-darwin1.[012]) ++ # On Rhapsody replace the C library with the System framework ++ newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'` ++ ;; ++ esac ++ ++ if test "$droppeddeps" = yes; then ++ if test "$module" = yes; then ++ echo ++ echo "*** Warning: libtool could not satisfy all declared inter-library" ++ $ECHO "*** dependencies of module $libname. Therefore, libtool will create" ++ echo "*** a static module, that should work as long as the dlopening" ++ echo "*** application is linked with the -dlopen flag." ++ if test -z "$global_symbol_pipe"; then ++ echo ++ echo "*** However, this would only work if libtool was able to extract symbol" ++ echo "*** lists from a program, using \`nm' or equivalent, but libtool could" ++ echo "*** not find such a program. So, this module is probably useless." ++ echo "*** \`nm' from GNU binutils and a full rebuild may help." 
++ fi ++ if test "$build_old_libs" = no; then ++ oldlibs="$output_objdir/$libname.$libext" ++ build_libtool_libs=module ++ build_old_libs=yes ++ else ++ build_libtool_libs=no ++ fi ++ else ++ echo "*** The inter-library dependencies that have been dropped here will be" ++ echo "*** automatically added whenever a program is linked with this library" ++ echo "*** or is declared to -dlopen it." ++ ++ if test "$allow_undefined" = no; then ++ echo ++ echo "*** Since this library must not contain undefined symbols," ++ echo "*** because either the platform does not support them or" ++ echo "*** it was explicitly requested with -no-undefined," ++ echo "*** libtool will only create a static version of it." ++ if test "$build_old_libs" = no; then ++ oldlibs="$output_objdir/$libname.$libext" ++ build_libtool_libs=module ++ build_old_libs=yes ++ else ++ build_libtool_libs=no ++ fi ++ fi ++ fi ++ fi ++ # Done checking deplibs! ++ deplibs=$newdeplibs ++ fi ++ # Time to change all our "foo.ltframework" stuff back to "-framework foo" ++ case $host in ++ *-*-darwin*) ++ newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` ++ new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` ++ deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` ++ ;; ++ esac ++ ++ # move library search paths that coincide with paths to not yet ++ # installed libraries to the beginning of the library search list ++ new_libs= ++ for path in $notinst_path; do ++ case " $new_libs " in ++ *" -L$path/$objdir "*) ;; ++ *) ++ case " $deplibs " in ++ *" -L$path/$objdir "*) ++ new_libs="$new_libs -L$path/$objdir" ;; ++ esac ++ ;; ++ esac ++ done ++ for deplib in $deplibs; do ++ case $deplib in ++ -L*) ++ case " $new_libs " in ++ *" $deplib "*) ;; ++ *) new_libs="$new_libs $deplib" ;; ++ esac ++ ;; ++ *) new_libs="$new_libs $deplib" ;; ++ esac ++ done ++ deplibs="$new_libs" ++ ++ # All the 
library-specific variables (install_libdir is set above). ++ library_names= ++ old_library= ++ dlname= ++ ++ # Test again, we may have decided not to build it any more ++ if test "$build_libtool_libs" = yes; then ++ if test "$hardcode_into_libs" = yes; then ++ # Hardcode the library paths ++ hardcode_libdirs= ++ dep_rpath= ++ rpath="$finalize_rpath" ++ test "$mode" != relink && rpath="$compile_rpath$rpath" ++ for libdir in $rpath; do ++ if test -n "$hardcode_libdir_flag_spec"; then ++ if test -n "$hardcode_libdir_separator"; then ++ if test -z "$hardcode_libdirs"; then ++ hardcode_libdirs="$libdir" ++ else ++ # Just accumulate the unique libdirs. ++ case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in ++ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ++ ;; ++ *) ++ hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" ++ ;; ++ esac ++ fi ++ else ++ eval "flag=\"$hardcode_libdir_flag_spec\"" ++ dep_rpath="$dep_rpath $flag" ++ fi ++ elif test -n "$runpath_var"; then ++ case "$perm_rpath " in ++ *" $libdir "*) ;; ++ *) perm_rpath="$perm_rpath $libdir" ;; ++ esac ++ fi ++ done ++ # Substitute the hardcoded libdirs into the rpath. ++ if test -n "$hardcode_libdir_separator" && ++ test -n "$hardcode_libdirs"; then ++ libdir="$hardcode_libdirs" ++ if test -n "$hardcode_libdir_flag_spec_ld"; then ++ eval "dep_rpath=\"$hardcode_libdir_flag_spec_ld\"" ++ else ++ eval "dep_rpath=\"$hardcode_libdir_flag_spec\"" ++ fi ++ fi ++ if test -n "$runpath_var" && test -n "$perm_rpath"; then ++ # We should set the runpath_var. 
++ rpath= ++ for dir in $perm_rpath; do ++ rpath="$rpath$dir:" ++ done ++ eval $runpath_var=\$rpath\$$runpath_var ++ export $runpath_var ++ fi ++ test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs" ++ fi ++ ++ shlibpath="$finalize_shlibpath" ++ test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath" ++ if test -n "$shlibpath"; then ++ eval $shlibpath_var=\$shlibpath\$$shlibpath_var ++ export $shlibpath_var ++ fi ++ ++ # Get the real and link names of the library. ++ eval "shared_ext=\"$shrext_cmds\"" ++ eval "library_names=\"$library_names_spec\"" ++ set dummy $library_names ++ shift ++ realname="$1" ++ shift ++ ++ if test -n "$soname_spec"; then ++ eval "soname=\"$soname_spec\"" ++ else ++ soname="$realname" ++ fi ++ if test -z "$dlname"; then ++ dlname=$soname ++ fi ++ ++ lib="$output_objdir/$realname" ++ linknames= ++ for link ++ do ++ linknames="$linknames $link" ++ done ++ ++ # Use standard objects if they are pic ++ test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP` ++ test "X$libobjs" = "X " && libobjs= ++ ++ delfiles= ++ if test -n "$export_symbols" && test -n "$include_expsyms"; then ++ $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp" ++ export_symbols="$output_objdir/$libname.uexp" ++ delfiles="$delfiles $export_symbols" ++ fi ++ ++ orig_export_symbols= ++ case $host_os in ++ cygwin* | mingw* | cegcc*) ++ if test -n "$export_symbols" && test -z "$export_symbols_regex"; then ++ # exporting using user supplied symfile ++ if test "x`$SED 1q $export_symbols`" != xEXPORTS; then ++ # and it's NOT already a .def file. Must figure out ++ # which of the given symbols are data symbols and tag ++ # them as such. So, trigger use of export_symbols_cmds. ++ # export_symbols gets reassigned inside the "prepare ++ # the list of exported symbols" if statement, so the ++ # include_expsyms logic still works. 
++ orig_export_symbols="$export_symbols" ++ export_symbols= ++ always_export_symbols=yes ++ fi ++ fi ++ ;; ++ esac ++ ++ # Prepare the list of exported symbols ++ if test -z "$export_symbols"; then ++ if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then ++ func_verbose "generating symbol list for \`$libname.la'" ++ export_symbols="$output_objdir/$libname.exp" ++ $opt_dry_run || $RM $export_symbols ++ cmds=$export_symbols_cmds ++ save_ifs="$IFS"; IFS='~' ++ for cmd in $cmds; do ++ IFS="$save_ifs" ++ eval "cmd=\"$cmd\"" ++ func_len " $cmd" ++ len=$func_len_result ++ if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then ++ func_show_eval "$cmd" 'exit $?' ++ skipped_export=false ++ else ++ # The command line is too long to execute in one step. ++ func_verbose "using reloadable object file for export list..." ++ skipped_export=: ++ # Break out early, otherwise skipped_export may be ++ # set to false by a later but shorter cmd. ++ break ++ fi ++ done ++ IFS="$save_ifs" ++ if test -n "$export_symbols_regex" && test "X$skipped_export" != "X:"; then ++ func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' ++ func_show_eval '$MV "${export_symbols}T" "$export_symbols"' ++ fi ++ fi ++ fi ++ ++ if test -n "$export_symbols" && test -n "$include_expsyms"; then ++ tmp_export_symbols="$export_symbols" ++ test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" ++ $opt_dry_run || $ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols" ++ fi ++ ++ if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then ++ # The given exports_symbols file has to be filtered, so filter it. ++ func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" ++ # FIXME: $output_objdir/$libname.filter potentially contains lots of ++ # 's' commands which not all seds can handle. GNU sed should be fine ++ # though. 
Also, the filter scales superlinearly with the number of ++ # global variables. join(1) would be nice here, but unfortunately ++ # isn't a blessed tool. ++ $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter ++ delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" ++ export_symbols=$output_objdir/$libname.def ++ $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols ++ fi ++ ++ tmp_deplibs= ++ for test_deplib in $deplibs; do ++ case " $convenience " in ++ *" $test_deplib "*) ;; ++ *) ++ tmp_deplibs="$tmp_deplibs $test_deplib" ++ ;; ++ esac ++ done ++ deplibs="$tmp_deplibs" ++ ++ if test -n "$convenience"; then ++ if test -n "$whole_archive_flag_spec" && ++ test "$compiler_needs_object" = yes && ++ test -z "$libobjs"; then ++ # extract the archives, so we have objects to list. ++ # TODO: could optimize this to just extract one archive. ++ whole_archive_flag_spec= ++ fi ++ if test -n "$whole_archive_flag_spec"; then ++ save_libobjs=$libobjs ++ eval "libobjs=\"\$libobjs $whole_archive_flag_spec\"" ++ test "X$libobjs" = "X " && libobjs= ++ else ++ gentop="$output_objdir/${outputname}x" ++ generated="$generated $gentop" ++ ++ func_extract_archives $gentop $convenience ++ libobjs="$libobjs $func_extract_archives_result" ++ test "X$libobjs" = "X " && libobjs= ++ fi ++ fi ++ ++ if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then ++ eval "flag=\"$thread_safe_flag_spec\"" ++ linker_flags="$linker_flags $flag" ++ fi ++ ++ # Make a backup of the uninstalled library when relinking ++ if test "$mode" = relink; then ++ $opt_dry_run || (cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U) || exit $? ++ fi ++ ++ # Do each of the archive commands. 
++ if test "$module" = yes && test -n "$module_cmds" ; then ++ if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then ++ eval "test_cmds=\"$module_expsym_cmds\"" ++ cmds=$module_expsym_cmds ++ else ++ eval "test_cmds=\"$module_cmds\"" ++ cmds=$module_cmds ++ fi ++ else ++ if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then ++ eval "test_cmds=\"$archive_expsym_cmds\"" ++ cmds=$archive_expsym_cmds ++ else ++ eval "test_cmds=\"$archive_cmds\"" ++ cmds=$archive_cmds ++ fi ++ fi ++ ++ if test "X$skipped_export" != "X:" && ++ func_len " $test_cmds" && ++ len=$func_len_result && ++ test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then ++ : ++ else ++ # The command line is too long to link in one step, link piecewise ++ # or, if using GNU ld and skipped_export is not :, use a linker ++ # script. ++ ++ # Save the value of $output and $libobjs because we want to ++ # use them later. If we have whole_archive_flag_spec, we ++ # want to use save_libobjs as it was before ++ # whole_archive_flag_spec was expanded, because we can't ++ # assume the linker understands whole_archive_flag_spec. ++ # This may have to be revisited, in case too many ++ # convenience libraries get linked in and end up exceeding ++ # the spec. ++ if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then ++ save_libobjs=$libobjs ++ fi ++ save_output=$output ++ func_basename "$output" ++ output_la=$func_basename_result ++ ++ # Clear the reloadable object creation command queue and ++ # initialize k to one. 
++ test_cmds= ++ concat_cmds= ++ objlist= ++ last_robj= ++ k=1 ++ ++ if test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "$with_gnu_ld" = yes; then ++ output=${output_objdir}/${output_la}.lnkscript ++ func_verbose "creating GNU ld script: $output" ++ echo 'INPUT (' > $output ++ for obj in $save_libobjs ++ do ++ $ECHO "$obj" >> $output ++ done ++ echo ')' >> $output ++ delfiles="$delfiles $output" ++ elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then ++ output=${output_objdir}/${output_la}.lnk ++ func_verbose "creating linker input file list: $output" ++ : > $output ++ set x $save_libobjs ++ shift ++ firstobj= ++ if test "$compiler_needs_object" = yes; then ++ firstobj="$1 " ++ shift ++ fi ++ for obj ++ do ++ $ECHO "$obj" >> $output ++ done ++ delfiles="$delfiles $output" ++ output=$firstobj\"$file_list_spec$output\" ++ else ++ if test -n "$save_libobjs"; then ++ func_verbose "creating reloadable object files..." ++ output=$output_objdir/$output_la-${k}.$objext ++ eval "test_cmds=\"$reload_cmds\"" ++ func_len " $test_cmds" ++ len0=$func_len_result ++ len=$len0 ++ ++ # Loop over the list of objects to be linked. ++ for obj in $save_libobjs ++ do ++ func_len " $obj" ++ func_arith $len + $func_len_result ++ len=$func_arith_result ++ if test "X$objlist" = X || ++ test "$len" -lt "$max_cmd_len"; then ++ func_append objlist " $obj" ++ else ++ # The command $test_cmds is almost too long, add a ++ # command to the queue. ++ if test "$k" -eq 1 ; then ++ # The first file doesn't have a previous command to add. ++ reload_objs=$objlist ++ eval "concat_cmds=\"$reload_cmds\"" ++ else ++ # All subsequent reloadable object files will link in ++ # the last one created. 
++ reload_objs="$objlist $last_robj" ++ eval "concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"" ++ fi ++ last_robj=$output_objdir/$output_la-${k}.$objext ++ func_arith $k + 1 ++ k=$func_arith_result ++ output=$output_objdir/$output_la-${k}.$objext ++ objlist=" $obj" ++ func_len " $last_robj" ++ func_arith $len0 + $func_len_result ++ len=$func_arith_result ++ fi ++ done ++ # Handle the remaining objects by creating one last ++ # reloadable object file. All subsequent reloadable object ++ # files will link in the last one created. ++ test -z "$concat_cmds" || concat_cmds=$concat_cmds~ ++ reload_objs="$objlist $last_robj" ++ eval "concat_cmds=\"\${concat_cmds}$reload_cmds\"" ++ if test -n "$last_robj"; then ++ eval "concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"" ++ fi ++ delfiles="$delfiles $output" ++ ++ else ++ output= ++ fi ++ ++ if ${skipped_export-false}; then ++ func_verbose "generating symbol list for \`$libname.la'" ++ export_symbols="$output_objdir/$libname.exp" ++ $opt_dry_run || $RM $export_symbols ++ libobjs=$output ++ # Append the command to create the export file. ++ test -z "$concat_cmds" || concat_cmds=$concat_cmds~ ++ eval "concat_cmds=\"\$concat_cmds$export_symbols_cmds\"" ++ if test -n "$last_robj"; then ++ eval "concat_cmds=\"\$concat_cmds~\$RM $last_robj\"" ++ fi ++ fi ++ ++ test -n "$save_libobjs" && ++ func_verbose "creating a temporary reloadable object file: $output" ++ ++ # Loop through the commands generated above and execute them. ++ save_ifs="$IFS"; IFS='~' ++ for cmd in $concat_cmds; do ++ IFS="$save_ifs" ++ $opt_silent || { ++ func_quote_for_expand "$cmd" ++ eval "func_echo $func_quote_for_expand_result" ++ } ++ $opt_dry_run || eval "$cmd" || { ++ lt_exit=$? 
++ ++ # Restore the uninstalled library and exit ++ if test "$mode" = relink; then ++ ( cd "$output_objdir" && \ ++ $RM "${realname}T" && \ ++ $MV "${realname}U" "$realname" ) ++ fi ++ ++ exit $lt_exit ++ } ++ done ++ IFS="$save_ifs" ++ ++ if test -n "$export_symbols_regex" && ${skipped_export-false}; then ++ func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' ++ func_show_eval '$MV "${export_symbols}T" "$export_symbols"' ++ fi ++ fi ++ ++ if ${skipped_export-false}; then ++ if test -n "$export_symbols" && test -n "$include_expsyms"; then ++ tmp_export_symbols="$export_symbols" ++ test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" ++ $opt_dry_run || $ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols" ++ fi ++ ++ if test -n "$orig_export_symbols"; then ++ # The given exports_symbols file has to be filtered, so filter it. ++ func_verbose "filter symbol list for \`$libname.la' to tag DATA exports" ++ # FIXME: $output_objdir/$libname.filter potentially contains lots of ++ # 's' commands which not all seds can handle. GNU sed should be fine ++ # though. Also, the filter scales superlinearly with the number of ++ # global variables. join(1) would be nice here, but unfortunately ++ # isn't a blessed tool. ++ $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter ++ delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" ++ export_symbols=$output_objdir/$libname.def ++ $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols ++ fi ++ fi ++ ++ libobjs=$output ++ # Restore the value of output. 
++ output=$save_output ++ ++ if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then ++ eval "libobjs=\"\$libobjs $whole_archive_flag_spec\"" ++ test "X$libobjs" = "X " && libobjs= ++ fi ++ # Expand the library linking commands again to reset the ++ # value of $libobjs for piecewise linking. ++ ++ # Do each of the archive commands. ++ if test "$module" = yes && test -n "$module_cmds" ; then ++ if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then ++ cmds=$module_expsym_cmds ++ else ++ cmds=$module_cmds ++ fi ++ else ++ if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then ++ cmds=$archive_expsym_cmds ++ else ++ cmds=$archive_cmds ++ fi ++ fi ++ fi ++ ++ if test -n "$delfiles"; then ++ # Append the command to remove temporary files to $cmds. ++ eval "cmds=\"\$cmds~\$RM $delfiles\"" ++ fi ++ ++ # Add any objects from preloaded convenience libraries ++ if test -n "$dlprefiles"; then ++ gentop="$output_objdir/${outputname}x" ++ generated="$generated $gentop" ++ ++ func_extract_archives $gentop $dlprefiles ++ libobjs="$libobjs $func_extract_archives_result" ++ test "X$libobjs" = "X " && libobjs= ++ fi ++ ++ save_ifs="$IFS"; IFS='~' ++ for cmd in $cmds; do ++ IFS="$save_ifs" ++ eval "cmd=\"$cmd\"" ++ $opt_silent || { ++ func_quote_for_expand "$cmd" ++ eval "func_echo $func_quote_for_expand_result" ++ } ++ $opt_dry_run || eval "$cmd" || { ++ lt_exit=$? ++ ++ # Restore the uninstalled library and exit ++ if test "$mode" = relink; then ++ ( cd "$output_objdir" && \ ++ $RM "${realname}T" && \ ++ $MV "${realname}U" "$realname" ) ++ fi ++ ++ exit $lt_exit ++ } ++ done ++ IFS="$save_ifs" ++ ++ # Restore the uninstalled library and exit ++ if test "$mode" = relink; then ++ $opt_dry_run || (cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname) || exit $? 
++ ++ if test -n "$convenience"; then ++ if test -z "$whole_archive_flag_spec"; then ++ func_show_eval '${RM}r "$gentop"' ++ fi ++ fi ++ ++ exit $EXIT_SUCCESS ++ fi ++ ++ # Create links to the real library. ++ for linkname in $linknames; do ++ if test "$realname" != "$linkname"; then ++ func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?' ++ fi ++ done ++ ++ # If -module or -export-dynamic was specified, set the dlname. ++ if test "$module" = yes || test "$export_dynamic" = yes; then ++ # On all known operating systems, these are identical. ++ dlname="$soname" ++ fi ++ fi ++ ;; ++ ++ obj) ++ if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then ++ func_warning "\`-dlopen' is ignored for objects" ++ fi ++ ++ case " $deplibs" in ++ *\ -l* | *\ -L*) ++ func_warning "\`-l' and \`-L' are ignored for objects" ;; ++ esac ++ ++ test -n "$rpath" && \ ++ func_warning "\`-rpath' is ignored for objects" ++ ++ test -n "$xrpath" && \ ++ func_warning "\`-R' is ignored for objects" ++ ++ test -n "$vinfo" && \ ++ func_warning "\`-version-info' is ignored for objects" ++ ++ test -n "$release" && \ ++ func_warning "\`-release' is ignored for objects" ++ ++ case $output in ++ *.lo) ++ test -n "$objs$old_deplibs" && \ ++ func_fatal_error "cannot build library object \`$output' from non-libtool objects" ++ ++ libobj=$output ++ func_lo2o "$libobj" ++ obj=$func_lo2o_result ++ ;; ++ *) ++ libobj= ++ obj="$output" ++ ;; ++ esac ++ ++ # Delete the old objects. ++ $opt_dry_run || $RM $obj $libobj ++ ++ # Objects from convenience libraries. This assumes ++ # single-version convenience libraries. Whenever we create ++ # different ones for PIC/non-PIC, this we'll have to duplicate ++ # the extraction. ++ reload_conv_objs= ++ gentop= ++ # reload_cmds runs $LD directly, so let us get rid of ++ # -Wl from whole_archive_flag_spec and hope we can get by with ++ # turning comma into space.. 
++ wl= ++ ++ if test -n "$convenience"; then ++ if test -n "$whole_archive_flag_spec"; then ++ eval "tmp_whole_archive_flags=\"$whole_archive_flag_spec\"" ++ reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'` ++ else ++ gentop="$output_objdir/${obj}x" ++ generated="$generated $gentop" ++ ++ func_extract_archives $gentop $convenience ++ reload_conv_objs="$reload_objs $func_extract_archives_result" ++ fi ++ fi ++ ++ # Create the old-style object. ++ reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test ++ ++ output="$obj" ++ func_execute_cmds "$reload_cmds" 'exit $?' ++ ++ # Exit if we aren't doing a library object file. ++ if test -z "$libobj"; then ++ if test -n "$gentop"; then ++ func_show_eval '${RM}r "$gentop"' ++ fi ++ ++ exit $EXIT_SUCCESS ++ fi ++ ++ if test "$build_libtool_libs" != yes; then ++ if test -n "$gentop"; then ++ func_show_eval '${RM}r "$gentop"' ++ fi ++ ++ # Create an invalid libtool object if no PIC, so that we don't ++ # accidentally link it into a program. ++ # $show "echo timestamp > $libobj" ++ # $opt_dry_run || echo timestamp > $libobj || exit $? ++ exit $EXIT_SUCCESS ++ fi ++ ++ if test -n "$pic_flag" || test "$pic_mode" != default; then ++ # Only do commands if we really have different PIC objects. ++ reload_objs="$libobjs $reload_conv_objs" ++ output="$libobj" ++ func_execute_cmds "$reload_cmds" 'exit $?' 
++ fi ++ ++ if test -n "$gentop"; then ++ func_show_eval '${RM}r "$gentop"' ++ fi ++ ++ exit $EXIT_SUCCESS ++ ;; ++ ++ prog) ++ case $host in ++ *cygwin*) func_stripname '' '.exe' "$output" ++ output=$func_stripname_result.exe;; ++ esac ++ test -n "$vinfo" && \ ++ func_warning "\`-version-info' is ignored for programs" ++ ++ test -n "$release" && \ ++ func_warning "\`-release' is ignored for programs" ++ ++ test "$preload" = yes \ ++ && test "$dlopen_support" = unknown \ ++ && test "$dlopen_self" = unknown \ ++ && test "$dlopen_self_static" = unknown && \ ++ func_warning "\`LT_INIT([dlopen])' not used. Assuming no dlopen support." ++ ++ case $host in ++ *-*-rhapsody* | *-*-darwin1.[012]) ++ # On Rhapsody replace the C library is the System framework ++ compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'` ++ finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'` ++ ;; ++ esac ++ ++ case $host in ++ *-*-darwin*) ++ # Don't allow lazy linking, it breaks C++ global constructors ++ # But is supposedly fixed on 10.4 or later (yay!). 
++ if test "$tagname" = CXX ; then ++ case ${MACOSX_DEPLOYMENT_TARGET-10.0} in ++ 10.[0123]) ++ compile_command="$compile_command ${wl}-bind_at_load" ++ finalize_command="$finalize_command ${wl}-bind_at_load" ++ ;; ++ esac ++ fi ++ # Time to change all our "foo.ltframework" stuff back to "-framework foo" ++ compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` ++ finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` ++ ;; ++ esac ++ ++ ++ # move library search paths that coincide with paths to not yet ++ # installed libraries to the beginning of the library search list ++ new_libs= ++ for path in $notinst_path; do ++ case " $new_libs " in ++ *" -L$path/$objdir "*) ;; ++ *) ++ case " $compile_deplibs " in ++ *" -L$path/$objdir "*) ++ new_libs="$new_libs -L$path/$objdir" ;; ++ esac ++ ;; ++ esac ++ done ++ for deplib in $compile_deplibs; do ++ case $deplib in ++ -L*) ++ case " $new_libs " in ++ *" $deplib "*) ;; ++ *) new_libs="$new_libs $deplib" ;; ++ esac ++ ;; ++ *) new_libs="$new_libs $deplib" ;; ++ esac ++ done ++ compile_deplibs="$new_libs" ++ ++ ++ compile_command="$compile_command $compile_deplibs" ++ finalize_command="$finalize_command $finalize_deplibs" ++ ++ if test -n "$rpath$xrpath"; then ++ # If the user specified any rpath flags, then add them. ++ for libdir in $rpath $xrpath; do ++ # This is the magic to use -rpath. ++ case "$finalize_rpath " in ++ *" $libdir "*) ;; ++ *) finalize_rpath="$finalize_rpath $libdir" ;; ++ esac ++ done ++ fi ++ ++ # Now hardcode the library paths ++ rpath= ++ hardcode_libdirs= ++ for libdir in $compile_rpath $finalize_rpath; do ++ if test -n "$hardcode_libdir_flag_spec"; then ++ if test -n "$hardcode_libdir_separator"; then ++ if test -z "$hardcode_libdirs"; then ++ hardcode_libdirs="$libdir" ++ else ++ # Just accumulate the unique libdirs. 
++ case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in ++ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ++ ;; ++ *) ++ hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" ++ ;; ++ esac ++ fi ++ else ++ eval "flag=\"$hardcode_libdir_flag_spec\"" ++ rpath="$rpath $flag" ++ fi ++ elif test -n "$runpath_var"; then ++ case "$perm_rpath " in ++ *" $libdir "*) ;; ++ *) perm_rpath="$perm_rpath $libdir" ;; ++ esac ++ fi ++ case $host in ++ *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) ++ testbindir=`${ECHO} "$libdir" | ${SED} -e 's*/lib$*/bin*'` ++ case :$dllsearchpath: in ++ *":$libdir:"*) ;; ++ ::) dllsearchpath=$libdir;; ++ *) dllsearchpath="$dllsearchpath:$libdir";; ++ esac ++ case :$dllsearchpath: in ++ *":$testbindir:"*) ;; ++ ::) dllsearchpath=$testbindir;; ++ *) dllsearchpath="$dllsearchpath:$testbindir";; ++ esac ++ ;; ++ esac ++ done ++ # Substitute the hardcoded libdirs into the rpath. ++ if test -n "$hardcode_libdir_separator" && ++ test -n "$hardcode_libdirs"; then ++ libdir="$hardcode_libdirs" ++ eval "rpath=\" $hardcode_libdir_flag_spec\"" ++ fi ++ compile_rpath="$rpath" ++ ++ rpath= ++ hardcode_libdirs= ++ for libdir in $finalize_rpath; do ++ if test -n "$hardcode_libdir_flag_spec"; then ++ if test -n "$hardcode_libdir_separator"; then ++ if test -z "$hardcode_libdirs"; then ++ hardcode_libdirs="$libdir" ++ else ++ # Just accumulate the unique libdirs. 
++ case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in ++ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ++ ;; ++ *) ++ hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" ++ ;; ++ esac ++ fi ++ else ++ eval "flag=\"$hardcode_libdir_flag_spec\"" ++ rpath="$rpath $flag" ++ fi ++ elif test -n "$runpath_var"; then ++ case "$finalize_perm_rpath " in ++ *" $libdir "*) ;; ++ *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;; ++ esac ++ fi ++ done ++ # Substitute the hardcoded libdirs into the rpath. ++ if test -n "$hardcode_libdir_separator" && ++ test -n "$hardcode_libdirs"; then ++ libdir="$hardcode_libdirs" ++ eval "rpath=\" $hardcode_libdir_flag_spec\"" ++ fi ++ finalize_rpath="$rpath" ++ ++ if test -n "$libobjs" && test "$build_old_libs" = yes; then ++ # Transform all the library objects into standard objects. ++ compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP` ++ finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP` ++ fi ++ ++ func_generate_dlsyms "$outputname" "@PROGRAM@" "no" ++ ++ # template prelinking step ++ if test -n "$prelink_cmds"; then ++ func_execute_cmds "$prelink_cmds" 'exit $?' ++ fi ++ ++ wrappers_required=yes ++ case $host in ++ *cegcc* | *mingw32ce*) ++ # Disable wrappers for cegcc and mingw32ce hosts, we are cross compiling anyway. ++ wrappers_required=no ++ ;; ++ *cygwin* | *mingw* ) ++ if test "$build_libtool_libs" != yes; then ++ wrappers_required=no ++ fi ++ ;; ++ *) ++ if test "$need_relink" = no || test "$build_libtool_libs" != yes; then ++ wrappers_required=no ++ fi ++ ;; ++ esac ++ if test "$wrappers_required" = no; then ++ # Replace the output file specification. ++ compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'` ++ link_command="$compile_command$compile_rpath" ++ ++ # We have no uninstalled library dependencies, so finalize right now. 
++ exit_status=0 ++ func_show_eval "$link_command" 'exit_status=$?' ++ ++ # Delete the generated files. ++ if test -f "$output_objdir/${outputname}S.${objext}"; then ++ func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"' ++ fi ++ ++ exit $exit_status ++ fi ++ ++ if test -n "$compile_shlibpath$finalize_shlibpath"; then ++ compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command" ++ fi ++ if test -n "$finalize_shlibpath"; then ++ finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command" ++ fi ++ ++ compile_var= ++ finalize_var= ++ if test -n "$runpath_var"; then ++ if test -n "$perm_rpath"; then ++ # We should set the runpath_var. ++ rpath= ++ for dir in $perm_rpath; do ++ rpath="$rpath$dir:" ++ done ++ compile_var="$runpath_var=\"$rpath\$$runpath_var\" " ++ fi ++ if test -n "$finalize_perm_rpath"; then ++ # We should set the runpath_var. ++ rpath= ++ for dir in $finalize_perm_rpath; do ++ rpath="$rpath$dir:" ++ done ++ finalize_var="$runpath_var=\"$rpath\$$runpath_var\" " ++ fi ++ fi ++ ++ if test "$no_install" = yes; then ++ # We don't need to create a wrapper script. ++ link_command="$compile_var$compile_command$compile_rpath" ++ # Replace the output file specification. ++ link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'` ++ # Delete the old output file. ++ $opt_dry_run || $RM $output ++ # Link the executable and exit ++ func_show_eval "$link_command" 'exit $?' 
++ exit $EXIT_SUCCESS ++ fi ++ ++ if test "$hardcode_action" = relink; then ++ # Fast installation is not supported ++ link_command="$compile_var$compile_command$compile_rpath" ++ relink_command="$finalize_var$finalize_command$finalize_rpath" ++ ++ func_warning "this platform does not like uninstalled shared libraries" ++ func_warning "\`$output' will be relinked during installation" ++ else ++ if test "$fast_install" != no; then ++ link_command="$finalize_var$compile_command$finalize_rpath" ++ if test "$fast_install" = yes; then ++ relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'` ++ else ++ # fast_install is set to needless ++ relink_command= ++ fi ++ else ++ link_command="$compile_var$compile_command$compile_rpath" ++ relink_command="$finalize_var$finalize_command$finalize_rpath" ++ fi ++ fi ++ ++ # Replace the output file specification. ++ link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'` ++ ++ # Delete the old output files. ++ $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname ++ ++ func_show_eval "$link_command" 'exit $?' ++ ++ # Now create the wrapper script. ++ func_verbose "creating $output" ++ ++ # Quote the relink command for shipping. 
++ if test -n "$relink_command"; then ++ # Preserve any variables that may affect compiler behavior ++ for var in $variables_saved_for_relink; do ++ if eval test -z \"\${$var+set}\"; then ++ relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command" ++ elif eval var_value=\$$var; test -z "$var_value"; then ++ relink_command="$var=; export $var; $relink_command" ++ else ++ func_quote_for_eval "$var_value" ++ relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command" ++ fi ++ done ++ relink_command="(cd `pwd`; $relink_command)" ++ relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"` ++ fi ++ ++ # Only actually do things if not in dry run mode. ++ $opt_dry_run || { ++ # win32 will think the script is a binary if it has ++ # a .exe suffix, so we strip it off here. ++ case $output in ++ *.exe) func_stripname '' '.exe' "$output" ++ output=$func_stripname_result ;; ++ esac ++ # test for cygwin because mv fails w/o .exe extensions ++ case $host in ++ *cygwin*) ++ exeext=.exe ++ func_stripname '' '.exe' "$outputname" ++ outputname=$func_stripname_result ;; ++ *) exeext= ;; ++ esac ++ case $host in ++ *cygwin* | *mingw* ) ++ func_dirname_and_basename "$output" "" "." ++ output_name=$func_basename_result ++ output_path=$func_dirname_result ++ cwrappersource="$output_path/$objdir/lt-$output_name.c" ++ cwrapper="$output_path/$output_name.exe" ++ $RM $cwrappersource $cwrapper ++ trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15 ++ ++ func_emit_cwrapperexe_src > $cwrappersource ++ ++ # The wrapper executable is built using the $host compiler, ++ # because it contains $host paths and files. If cross- ++ # compiling, it, like the target executable, must be ++ # executed on the $host or under an emulation environment. 
++ $opt_dry_run || { ++ $LTCC $LTCFLAGS -o $cwrapper $cwrappersource ++ $STRIP $cwrapper ++ } ++ ++ # Now, create the wrapper script for func_source use: ++ func_ltwrapper_scriptname $cwrapper ++ $RM $func_ltwrapper_scriptname_result ++ trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15 ++ $opt_dry_run || { ++ # note: this script will not be executed, so do not chmod. ++ if test "x$build" = "x$host" ; then ++ $cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result ++ else ++ func_emit_wrapper no > $func_ltwrapper_scriptname_result ++ fi ++ } ++ ;; ++ * ) ++ $RM $output ++ trap "$RM $output; exit $EXIT_FAILURE" 1 2 15 ++ ++ func_emit_wrapper no > $output ++ chmod +x $output ++ ;; ++ esac ++ } ++ exit $EXIT_SUCCESS ++ ;; ++ esac ++ ++ # See if we need to build an old-fashioned archive. ++ for oldlib in $oldlibs; do ++ ++ if test "$build_libtool_libs" = convenience; then ++ oldobjs="$libobjs_save $symfileobj" ++ addlibs="$convenience" ++ build_libtool_libs=no ++ else ++ if test "$build_libtool_libs" = module; then ++ oldobjs="$libobjs_save" ++ build_libtool_libs=no ++ else ++ oldobjs="$old_deplibs $non_pic_objects" ++ if test "$preload" = yes && test -f "$symfileobj"; then ++ oldobjs="$oldobjs $symfileobj" ++ fi ++ fi ++ addlibs="$old_convenience" ++ fi ++ ++ if test -n "$addlibs"; then ++ gentop="$output_objdir/${outputname}x" ++ generated="$generated $gentop" ++ ++ func_extract_archives $gentop $addlibs ++ oldobjs="$oldobjs $func_extract_archives_result" ++ fi ++ ++ # Do each command in the archive commands. 
++ if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then ++ cmds=$old_archive_from_new_cmds ++ else ++ ++ # Add any objects from preloaded convenience libraries ++ if test -n "$dlprefiles"; then ++ gentop="$output_objdir/${outputname}x" ++ generated="$generated $gentop" ++ ++ func_extract_archives $gentop $dlprefiles ++ oldobjs="$oldobjs $func_extract_archives_result" ++ fi ++ ++ # POSIX demands no paths to be encoded in archives. We have ++ # to avoid creating archives with duplicate basenames if we ++ # might have to extract them afterwards, e.g., when creating a ++ # static archive out of a convenience library, or when linking ++ # the entirety of a libtool archive into another (currently ++ # not supported by libtool). ++ if (for obj in $oldobjs ++ do ++ func_basename "$obj" ++ $ECHO "$func_basename_result" ++ done | sort | sort -uc >/dev/null 2>&1); then ++ : ++ else ++ echo "copying selected object files to avoid basename conflicts..." ++ gentop="$output_objdir/${outputname}x" ++ generated="$generated $gentop" ++ func_mkdir_p "$gentop" ++ save_oldobjs=$oldobjs ++ oldobjs= ++ counter=1 ++ for obj in $save_oldobjs ++ do ++ func_basename "$obj" ++ objbase="$func_basename_result" ++ case " $oldobjs " in ++ " ") oldobjs=$obj ;; ++ *[\ /]"$objbase "*) ++ while :; do ++ # Make sure we don't pick an alternate name that also ++ # overlaps. ++ newobj=lt$counter-$objbase ++ func_arith $counter + 1 ++ counter=$func_arith_result ++ case " $oldobjs " in ++ *[\ /]"$newobj "*) ;; ++ *) if test ! 
-f "$gentop/$newobj"; then break; fi ;; ++ esac ++ done ++ func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj" ++ oldobjs="$oldobjs $gentop/$newobj" ++ ;; ++ *) oldobjs="$oldobjs $obj" ;; ++ esac ++ done ++ fi ++ eval "cmds=\"$old_archive_cmds\"" ++ ++ func_len " $cmds" ++ len=$func_len_result ++ if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then ++ cmds=$old_archive_cmds ++ else ++ # the command line is too long to link in one step, link in parts ++ func_verbose "using piecewise archive linking..." ++ save_RANLIB=$RANLIB ++ RANLIB=: ++ objlist= ++ concat_cmds= ++ save_oldobjs=$oldobjs ++ oldobjs= ++ # Is there a better way of finding the last object in the list? ++ for obj in $save_oldobjs ++ do ++ last_oldobj=$obj ++ done ++ eval "test_cmds=\"$old_archive_cmds\"" ++ func_len " $test_cmds" ++ len0=$func_len_result ++ len=$len0 ++ for obj in $save_oldobjs ++ do ++ func_len " $obj" ++ func_arith $len + $func_len_result ++ len=$func_arith_result ++ func_append objlist " $obj" ++ if test "$len" -lt "$max_cmd_len"; then ++ : ++ else ++ # the above command should be used before it gets too long ++ oldobjs=$objlist ++ if test "$obj" = "$last_oldobj" ; then ++ RANLIB=$save_RANLIB ++ fi ++ test -z "$concat_cmds" || concat_cmds=$concat_cmds~ ++ eval "concat_cmds=\"\${concat_cmds}$old_archive_cmds\"" ++ objlist= ++ len=$len0 ++ fi ++ done ++ RANLIB=$save_RANLIB ++ oldobjs=$objlist ++ if test "X$oldobjs" = "X" ; then ++ eval "cmds=\"\$concat_cmds\"" ++ else ++ eval "cmds=\"\$concat_cmds~\$old_archive_cmds\"" ++ fi ++ fi ++ fi ++ func_execute_cmds "$cmds" 'exit $?' ++ done ++ ++ test -n "$generated" && \ ++ func_show_eval "${RM}r$generated" ++ ++ # Now create the libtool archive. 
++ case $output in ++ *.la) ++ old_library= ++ test "$build_old_libs" = yes && old_library="$libname.$libext" ++ func_verbose "creating $output" ++ ++ # Preserve any variables that may affect compiler behavior ++ for var in $variables_saved_for_relink; do ++ if eval test -z \"\${$var+set}\"; then ++ relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command" ++ elif eval var_value=\$$var; test -z "$var_value"; then ++ relink_command="$var=; export $var; $relink_command" ++ else ++ func_quote_for_eval "$var_value" ++ relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command" ++ fi ++ done ++ # Quote the link command for shipping. ++ relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)" ++ relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"` ++ if test "$hardcode_automatic" = yes ; then ++ relink_command= ++ fi ++ ++ # Only create the output if not a dry run. 
++ $opt_dry_run || { ++ for installed in no yes; do ++ if test "$installed" = yes; then ++ if test -z "$install_libdir"; then ++ break ++ fi ++ output="$output_objdir/$outputname"i ++ # Replace all uninstalled libtool libraries with the installed ones ++ newdependency_libs= ++ for deplib in $dependency_libs; do ++ case $deplib in ++ *.la) ++ func_basename "$deplib" ++ name="$func_basename_result" ++ libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` ++ test -z "$libdir" && \ ++ func_fatal_error "\`$deplib' is not a valid libtool archive" ++ newdependency_libs="$newdependency_libs $libdir/$name" ++ ;; ++ *) newdependency_libs="$newdependency_libs $deplib" ;; ++ esac ++ done ++ dependency_libs="$newdependency_libs" ++ newdlfiles= ++ ++ for lib in $dlfiles; do ++ case $lib in ++ *.la) ++ func_basename "$lib" ++ name="$func_basename_result" ++ libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` ++ test -z "$libdir" && \ ++ func_fatal_error "\`$lib' is not a valid libtool archive" ++ newdlfiles="$newdlfiles $libdir/$name" ++ ;; ++ *) newdlfiles="$newdlfiles $lib" ;; ++ esac ++ done ++ dlfiles="$newdlfiles" ++ newdlprefiles= ++ for lib in $dlprefiles; do ++ case $lib in ++ *.la) ++ # Only pass preopened files to the pseudo-archive (for ++ # eventual linking with the app. 
that links it) if we ++ # didn't already link the preopened objects directly into ++ # the library: ++ func_basename "$lib" ++ name="$func_basename_result" ++ libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` ++ test -z "$libdir" && \ ++ func_fatal_error "\`$lib' is not a valid libtool archive" ++ newdlprefiles="$newdlprefiles $libdir/$name" ++ ;; ++ esac ++ done ++ dlprefiles="$newdlprefiles" ++ else ++ newdlfiles= ++ for lib in $dlfiles; do ++ case $lib in ++ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; ++ *) abs=`pwd`"/$lib" ;; ++ esac ++ newdlfiles="$newdlfiles $abs" ++ done ++ dlfiles="$newdlfiles" ++ newdlprefiles= ++ for lib in $dlprefiles; do ++ case $lib in ++ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; ++ *) abs=`pwd`"/$lib" ;; ++ esac ++ newdlprefiles="$newdlprefiles $abs" ++ done ++ dlprefiles="$newdlprefiles" ++ fi ++ $RM $output ++ # place dlname in correct position for cygwin ++ # In fact, it would be nice if we could use this code for all target ++ # systems that can't hard-code library paths into their executables ++ # and that have no shared library path variable independent of PATH, ++ # but it turns out we can't easily determine that from inspecting ++ # libtool variables, so we have to hard-code the OSs to which it ++ # applies here; at the moment, that means platforms that use the PE ++ # object format with DLL files. See the long comment at the top of ++ # tests/bindir.at for full details. ++ tdlname=$dlname ++ case $host,$output,$installed,$module,$dlname in ++ *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll) ++ # If a -bindir argument was supplied, place the dll there. ++ if test "x$bindir" != x ; ++ then ++ func_relative_path "$install_libdir" "$bindir" ++ tdlname=$func_relative_path_result$dlname ++ else ++ # Otherwise fall back on heuristic. 
++ tdlname=../bin/$dlname ++ fi ++ ;; ++ esac ++ $ECHO > $output "\ ++# $outputname - a libtool library file ++# Generated by $PROGRAM (GNU $PACKAGE$TIMESTAMP) $VERSION ++# ++# Please DO NOT delete this file! ++# It is necessary for linking the library. ++ ++# The name that we can dlopen(3). ++dlname='$tdlname' ++ ++# Names of this library. ++library_names='$library_names' ++ ++# The name of the static archive. ++old_library='$old_library' ++ ++# Linker flags that can not go in dependency_libs. ++inherited_linker_flags='$new_inherited_linker_flags' ++ ++# Libraries that this one depends upon. ++dependency_libs='$dependency_libs' ++ ++# Names of additional weak libraries provided by this library ++weak_library_names='$weak_libs' ++ ++# Version information for $libname. ++current=$current ++age=$age ++revision=$revision ++ ++# Is this an already installed library? ++installed=$installed ++ ++# Should we warn about portability when linking against -modules? ++shouldnotlink=$module ++ ++# Files to dlopen/dlpreopen ++dlopen='$dlfiles' ++dlpreopen='$dlprefiles' ++ ++# Directory that this library needs to be installed in: ++libdir='$install_libdir'" ++ if test "$installed" = no && test "$need_relink" = yes; then ++ $ECHO >> $output "\ ++relink_command=\"$relink_command\"" ++ fi ++ done ++ } ++ ++ # Do a symbolic link so that the libtool archive can be found in ++ # LD_LIBRARY_PATH before the program is installed. ++ func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?' ++ ;; ++ esac ++ exit $EXIT_SUCCESS ++} ++ ++{ test "$mode" = link || test "$mode" = relink; } && ++ func_mode_link ${1+"$@"} ++ ++ ++# func_mode_uninstall arg... ++func_mode_uninstall () ++{ ++ $opt_debug ++ RM="$nonopt" ++ files= ++ rmforce= ++ exit_status=0 ++ ++ # This variable tells wrapper scripts just to set variables rather ++ # than running their programs. 
++ libtool_install_magic="$magic" ++ ++ for arg ++ do ++ case $arg in ++ -f) RM="$RM $arg"; rmforce=yes ;; ++ -*) RM="$RM $arg" ;; ++ *) files="$files $arg" ;; ++ esac ++ done ++ ++ test -z "$RM" && \ ++ func_fatal_help "you must specify an RM program" ++ ++ rmdirs= ++ ++ origobjdir="$objdir" ++ for file in $files; do ++ func_dirname "$file" "" "." ++ dir="$func_dirname_result" ++ if test "X$dir" = X.; then ++ objdir="$origobjdir" ++ else ++ objdir="$dir/$origobjdir" ++ fi ++ func_basename "$file" ++ name="$func_basename_result" ++ test "$mode" = uninstall && objdir="$dir" ++ ++ # Remember objdir for removal later, being careful to avoid duplicates ++ if test "$mode" = clean; then ++ case " $rmdirs " in ++ *" $objdir "*) ;; ++ *) rmdirs="$rmdirs $objdir" ;; ++ esac ++ fi ++ ++ # Don't error if the file doesn't exist and rm -f was used. ++ if { test -L "$file"; } >/dev/null 2>&1 || ++ { test -h "$file"; } >/dev/null 2>&1 || ++ test -f "$file"; then ++ : ++ elif test -d "$file"; then ++ exit_status=1 ++ continue ++ elif test "$rmforce" = yes; then ++ continue ++ fi ++ ++ rmfiles="$file" ++ ++ case $name in ++ *.la) ++ # Possibly a libtool archive, so verify it. ++ if func_lalib_p "$file"; then ++ func_source $dir/$name ++ ++ # Delete the libtool libraries and symlinks. ++ for n in $library_names; do ++ rmfiles="$rmfiles $objdir/$n" ++ done ++ test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library" ++ ++ case "$mode" in ++ clean) ++ case " $library_names " in ++ # " " in the beginning catches empty $dlname ++ *" $dlname "*) ;; ++ *) rmfiles="$rmfiles $objdir/$dlname" ;; ++ esac ++ test -n "$libdir" && rmfiles="$rmfiles $objdir/$name $objdir/${name}i" ++ ;; ++ uninstall) ++ if test -n "$library_names"; then ++ # Do each command in the postuninstall commands. ++ func_execute_cmds "$postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' ++ fi ++ ++ if test -n "$old_library"; then ++ # Do each command in the old_postuninstall commands. 
++ func_execute_cmds "$old_postuninstall_cmds" 'test "$rmforce" = yes || exit_status=1' ++ fi ++ # FIXME: should reinstall the best remaining shared library. ++ ;; ++ esac ++ fi ++ ;; ++ ++ *.lo) ++ # Possibly a libtool object, so verify it. ++ if func_lalib_p "$file"; then ++ ++ # Read the .lo file ++ func_source $dir/$name ++ ++ # Add PIC object to the list of files to remove. ++ if test -n "$pic_object" && ++ test "$pic_object" != none; then ++ rmfiles="$rmfiles $dir/$pic_object" ++ fi ++ ++ # Add non-PIC object to the list of files to remove. ++ if test -n "$non_pic_object" && ++ test "$non_pic_object" != none; then ++ rmfiles="$rmfiles $dir/$non_pic_object" ++ fi ++ fi ++ ;; ++ ++ *) ++ if test "$mode" = clean ; then ++ noexename=$name ++ case $file in ++ *.exe) ++ func_stripname '' '.exe' "$file" ++ file=$func_stripname_result ++ func_stripname '' '.exe' "$name" ++ noexename=$func_stripname_result ++ # $file with .exe has already been added to rmfiles, ++ # add $file without .exe ++ rmfiles="$rmfiles $file" ++ ;; ++ esac ++ # Do a test to see if this is a libtool program. 
++ if func_ltwrapper_p "$file"; then ++ if func_ltwrapper_executable_p "$file"; then ++ func_ltwrapper_scriptname "$file" ++ relink_command= ++ func_source $func_ltwrapper_scriptname_result ++ rmfiles="$rmfiles $func_ltwrapper_scriptname_result" ++ else ++ relink_command= ++ func_source $dir/$noexename ++ fi ++ ++ # note $name still contains .exe if it was in $file originally ++ # as does the version of $file that was added into $rmfiles ++ rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}" ++ if test "$fast_install" = yes && test -n "$relink_command"; then ++ rmfiles="$rmfiles $objdir/lt-$name" ++ fi ++ if test "X$noexename" != "X$name" ; then ++ rmfiles="$rmfiles $objdir/lt-${noexename}.c" ++ fi ++ fi ++ fi ++ ;; ++ esac ++ func_show_eval "$RM $rmfiles" 'exit_status=1' ++ done ++ objdir="$origobjdir" ++ ++ # Try to remove the ${objdir}s in the directories where we deleted files ++ for dir in $rmdirs; do ++ if test -d "$dir"; then ++ func_show_eval "rmdir $dir >/dev/null 2>&1" ++ fi ++ done ++ ++ exit $exit_status ++} ++ ++{ test "$mode" = uninstall || test "$mode" = clean; } && ++ func_mode_uninstall ${1+"$@"} ++ ++test -z "$mode" && { ++ help="$generic_help" ++ func_fatal_help "you must specify a MODE" ++} ++ ++test -z "$exec_cmd" && \ ++ func_fatal_help "invalid operation mode \`$mode'" ++ ++if test -n "$exec_cmd"; then ++ eval exec "$exec_cmd" ++ exit $EXIT_FAILURE ++fi ++ ++exit $exit_status ++ ++ ++# The TAGs below are defined such that we never get into a situation ++# in which we disable both kinds of libraries. Given conflicting ++# choices, we go for a static library, that is the most portable, ++# since we can't tell whether shared libraries were disabled because ++# the user asked for that or because the platform doesn't support ++# them. This is particularly important on AIX, because we don't ++# support having both static and shared libraries enabled at the same ++# time on that platform, so we default to a shared-only configuration. 
++# If a disable-shared tag is given, we'll fallback to a static-only ++# configuration. But we'll never go from static-only to shared-only. ++ ++# ### BEGIN LIBTOOL TAG CONFIG: disable-shared ++build_libtool_libs=no ++build_old_libs=yes ++# ### END LIBTOOL TAG CONFIG: disable-shared ++ ++# ### BEGIN LIBTOOL TAG CONFIG: disable-static ++build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac` ++# ### END LIBTOOL TAG CONFIG: disable-static ++ ++# Local Variables: ++# mode:shell-script ++# sh-indentation:2 ++# End: ++# vi:sw=2 ++ diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/missing index 000000000,000000000..f62bbae30 new file mode 100755 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/missing @@@ -1,0 -1,0 +1,215 @@@ ++#! /bin/sh ++# Common wrapper for a few potentially missing GNU programs. ++ ++scriptversion=2013-10-28.13; # UTC ++ ++# Copyright (C) 1996-2014 Free Software Foundation, Inc. ++# Originally written by Fran,cois Pinard , 1996. ++ ++# This program is free software; you can redistribute it and/or modify ++# it under the terms of the GNU General Public License as published by ++# the Free Software Foundation; either version 2, or (at your option) ++# any later version. ++ ++# This program is distributed in the hope that it will be useful, ++# but WITHOUT ANY WARRANTY; without even the implied warranty of ++# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++# GNU General Public License for more details. ++ ++# You should have received a copy of the GNU General Public License ++# along with this program. If not, see . ++ ++# As a special exception to the GNU General Public License, if you ++# distribute this file as part of a program that contains a ++# configuration script generated by Autoconf, you may include it under ++# the same distribution terms that you use for the rest of that program. 
++ ++if test $# -eq 0; then ++ echo 1>&2 "Try '$0 --help' for more information" ++ exit 1 ++fi ++ ++case $1 in ++ ++ --is-lightweight) ++ # Used by our autoconf macros to check whether the available missing ++ # script is modern enough. ++ exit 0 ++ ;; ++ ++ --run) ++ # Back-compat with the calling convention used by older automake. ++ shift ++ ;; ++ ++ -h|--h|--he|--hel|--help) ++ echo "\ ++$0 [OPTION]... PROGRAM [ARGUMENT]... ++ ++Run 'PROGRAM [ARGUMENT]...', returning a proper advice when this fails due ++to PROGRAM being missing or too old. ++ ++Options: ++ -h, --help display this help and exit ++ -v, --version output version information and exit ++ ++Supported PROGRAM values: ++ aclocal autoconf autoheader autom4te automake makeinfo ++ bison yacc flex lex help2man ++ ++Version suffixes to PROGRAM as well as the prefixes 'gnu-', 'gnu', and ++'g' are ignored when checking the name. ++ ++Send bug reports to ." ++ exit $? ++ ;; ++ ++ -v|--v|--ve|--ver|--vers|--versi|--versio|--version) ++ echo "missing $scriptversion (GNU Automake)" ++ exit $? ++ ;; ++ ++ -*) ++ echo 1>&2 "$0: unknown '$1' option" ++ echo 1>&2 "Try '$0 --help' for more information" ++ exit 1 ++ ;; ++ ++esac ++ ++# Run the given program, remember its exit status. ++"$@"; st=$? ++ ++# If it succeeded, we are done. ++test $st -eq 0 && exit 0 ++ ++# Also exit now if we it failed (or wasn't found), and '--version' was ++# passed; such an option is passed most likely to detect whether the ++# program is present and works. ++case $2 in --version|--help) exit $st;; esac ++ ++# Exit code 63 means version mismatch. This often happens when the user ++# tries to use an ancient version of a tool on a file that requires a ++# minimum version. ++if test $st -eq 63; then ++ msg="probably too old" ++elif test $st -eq 127; then ++ # Program was missing. ++ msg="missing on your system" ++else ++ # Program was found and executed, but failed. Give up. 
++ exit $st ++fi ++ ++perl_URL=http://www.perl.org/ ++flex_URL=http://flex.sourceforge.net/ ++gnu_software_URL=http://www.gnu.org/software ++ ++program_details () ++{ ++ case $1 in ++ aclocal|automake) ++ echo "The '$1' program is part of the GNU Automake package:" ++ echo "<$gnu_software_URL/automake>" ++ echo "It also requires GNU Autoconf, GNU m4 and Perl in order to run:" ++ echo "<$gnu_software_URL/autoconf>" ++ echo "<$gnu_software_URL/m4/>" ++ echo "<$perl_URL>" ++ ;; ++ autoconf|autom4te|autoheader) ++ echo "The '$1' program is part of the GNU Autoconf package:" ++ echo "<$gnu_software_URL/autoconf/>" ++ echo "It also requires GNU m4 and Perl in order to run:" ++ echo "<$gnu_software_URL/m4/>" ++ echo "<$perl_URL>" ++ ;; ++ esac ++} ++ ++give_advice () ++{ ++ # Normalize program name to check for. ++ normalized_program=`echo "$1" | sed ' ++ s/^gnu-//; t ++ s/^gnu//; t ++ s/^g//; t'` ++ ++ printf '%s\n' "'$1' is $msg." ++ ++ configure_deps="'configure.ac' or m4 files included by 'configure.ac'" ++ case $normalized_program in ++ autoconf*) ++ echo "You should only need it if you modified 'configure.ac'," ++ echo "or m4 files included by it." ++ program_details 'autoconf' ++ ;; ++ autoheader*) ++ echo "You should only need it if you modified 'acconfig.h' or" ++ echo "$configure_deps." ++ program_details 'autoheader' ++ ;; ++ automake*) ++ echo "You should only need it if you modified 'Makefile.am' or" ++ echo "$configure_deps." ++ program_details 'automake' ++ ;; ++ aclocal*) ++ echo "You should only need it if you modified 'acinclude.m4' or" ++ echo "$configure_deps." ++ program_details 'aclocal' ++ ;; ++ autom4te*) ++ echo "You might have modified some maintainer files that require" ++ echo "the 'autom4te' program to be rebuilt." ++ program_details 'autom4te' ++ ;; ++ bison*|yacc*) ++ echo "You should only need it if you modified a '.y' file." 
++ echo "You may want to install the GNU Bison package:" ++ echo "<$gnu_software_URL/bison/>" ++ ;; ++ lex*|flex*) ++ echo "You should only need it if you modified a '.l' file." ++ echo "You may want to install the Fast Lexical Analyzer package:" ++ echo "<$flex_URL>" ++ ;; ++ help2man*) ++ echo "You should only need it if you modified a dependency" \ ++ "of a man page." ++ echo "You may want to install the GNU Help2man package:" ++ echo "<$gnu_software_URL/help2man/>" ++ ;; ++ makeinfo*) ++ echo "You should only need it if you modified a '.texi' file, or" ++ echo "any other file indirectly affecting the aspect of the manual." ++ echo "You might want to install the Texinfo package:" ++ echo "<$gnu_software_URL/texinfo/>" ++ echo "The spurious makeinfo call might also be the consequence of" ++ echo "using a buggy 'make' (AIX, DU, IRIX), in which case you might" ++ echo "want to install GNU make:" ++ echo "<$gnu_software_URL/make/>" ++ ;; ++ *) ++ echo "You might have modified some files without having the proper" ++ echo "tools for further handling them. Check the 'README' file, it" ++ echo "often tells you about the needed prerequisites for installing" ++ echo "this package. You may also peek at any GNU archive site, in" ++ echo "case some other package contains this missing '$1' program." ++ ;; ++ esac ++} ++ ++give_advice "$1" | sed -e '1s/^/WARNING: /' \ ++ -e '2,$s/^/ /' >&2 ++ ++# Propagate the correct exit status (expected to be 127 for a program ++# not found, 63 for a program that failed due to version mismatch). 
++exit $st ++ ++# Local variables: ++# eval: (add-hook 'write-file-hooks 'time-stamp) ++# time-stamp-start: "scriptversion=" ++# time-stamp-format: "%:y-%02m-%02d.%02H" ++# time-stamp-time-zone: "UTC" ++# time-stamp-end: "; # UTC" ++# End: diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/mmap.c index 000000000,000000000..e30d1c1a3 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/mmap.c @@@ -1,0 -1,0 +1,303 @@@ ++/* mmap.c -- Memory allocation with mmap. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. 
*/ ++ ++#include "config.h" ++ ++#include ++#include ++#include ++#include ++#include ++#include ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++/* Memory allocation on systems that provide anonymous mmap. This ++ permits the backtrace functions to be invoked from a signal ++ handler, assuming that mmap is async-signal safe. */ ++ ++#ifndef MAP_ANONYMOUS ++#define MAP_ANONYMOUS MAP_ANON ++#endif ++ ++#ifndef MAP_FAILED ++#define MAP_FAILED ((void *)-1) ++#endif ++ ++/* A list of free memory blocks. */ ++ ++struct backtrace_freelist_struct ++{ ++ /* Next on list. */ ++ struct backtrace_freelist_struct *next; ++ /* Size of this block, including this structure. */ ++ size_t size; ++}; ++ ++/* Free memory allocated by backtrace_alloc. */ ++ ++static void ++backtrace_free_locked (struct backtrace_state *state, void *addr, size_t size) ++{ ++ /* Just leak small blocks. We don't have to be perfect. */ ++ if (size >= sizeof (struct backtrace_freelist_struct)) ++ { ++ struct backtrace_freelist_struct *p; ++ ++ p = (struct backtrace_freelist_struct *) addr; ++ p->next = state->freelist; ++ p->size = size; ++ state->freelist = p; ++ } ++} ++ ++/* Allocate memory like malloc. If ERROR_CALLBACK is NULL, don't ++ report an error. */ ++ ++void * ++backtrace_alloc (struct backtrace_state *state, ++ size_t size, backtrace_error_callback error_callback, ++ void *data) ++{ ++ void *ret; ++ int locked; ++ struct backtrace_freelist_struct **pp; ++ size_t pagesize; ++ size_t asksize; ++ void *page; ++ ++ ret = NULL; ++ ++ /* If we can acquire the lock, then see if there is space on the ++ free list. If we can't acquire the lock, drop straight into ++ using mmap. __sync_lock_test_and_set returns the old state of ++ the lock, so we have acquired it if it returns 0. 
*/ ++ ++ if (!state->threaded) ++ locked = 1; ++ else ++ locked = __sync_lock_test_and_set (&state->lock_alloc, 1) == 0; ++ ++ if (locked) ++ { ++ for (pp = &state->freelist; *pp != NULL; pp = &(*pp)->next) ++ { ++ if ((*pp)->size >= size) ++ { ++ struct backtrace_freelist_struct *p; ++ ++ p = *pp; ++ *pp = p->next; ++ ++ /* Round for alignment; we assume that no type we care about ++ is more than 8 bytes. */ ++ size = (size + 7) & ~ (size_t) 7; ++ if (size < p->size) ++ backtrace_free_locked (state, (char *) p + size, ++ p->size - size); ++ ++ ret = (void *) p; ++ ++ break; ++ } ++ } ++ ++ if (state->threaded) ++ __sync_lock_release (&state->lock_alloc); ++ } ++ ++ if (ret == NULL) ++ { ++ /* Allocate a new page. */ ++ ++ pagesize = getpagesize (); ++ asksize = (size + pagesize - 1) & ~ (pagesize - 1); ++ page = mmap (NULL, asksize, PROT_READ | PROT_WRITE, ++ MAP_PRIVATE | MAP_ANONYMOUS, -1, 0); ++ if (page == MAP_FAILED) ++ { ++ if (error_callback) ++ error_callback (data, "mmap", errno); ++ } ++ else ++ { ++ size = (size + 7) & ~ (size_t) 7; ++ if (size < asksize) ++ backtrace_free (state, (char *) page + size, asksize - size, ++ error_callback, data); ++ ++ ret = page; ++ } ++ } ++ ++ return ret; ++} ++ ++/* Free memory allocated by backtrace_alloc. */ ++ ++void ++backtrace_free (struct backtrace_state *state, void *addr, size_t size, ++ backtrace_error_callback error_callback ATTRIBUTE_UNUSED, ++ void *data ATTRIBUTE_UNUSED) ++{ ++ int locked; ++ ++ /* If we are freeing a large aligned block, just release it back to ++ the system. This case arises when growing a vector for a large ++ binary with lots of debug info. Calling munmap here may cause us ++ to call mmap again if there is also a large shared library; we ++ just live with that. 
*/ ++ if (size >= 16 * 4096) ++ { ++ size_t pagesize; ++ ++ pagesize = getpagesize (); ++ if (((uintptr_t) addr & (pagesize - 1)) == 0 ++ && (size & (pagesize - 1)) == 0) ++ { ++ /* If munmap fails for some reason, just add the block to ++ the freelist. */ ++ if (munmap (addr, size) == 0) ++ return; ++ } ++ } ++ ++ /* If we can acquire the lock, add the new space to the free list. ++ If we can't acquire the lock, just leak the memory. ++ __sync_lock_test_and_set returns the old state of the lock, so we ++ have acquired it if it returns 0. */ ++ ++ if (!state->threaded) ++ locked = 1; ++ else ++ locked = __sync_lock_test_and_set (&state->lock_alloc, 1) == 0; ++ ++ if (locked) ++ { ++ backtrace_free_locked (state, addr, size); ++ ++ if (state->threaded) ++ __sync_lock_release (&state->lock_alloc); ++ } ++} ++ ++/* Grow VEC by SIZE bytes. */ ++ ++void * ++backtrace_vector_grow (struct backtrace_state *state,size_t size, ++ backtrace_error_callback error_callback, ++ void *data, struct backtrace_vector *vec) ++{ ++ void *ret; ++ ++ if (size > vec->alc) ++ { ++ size_t pagesize; ++ size_t alc; ++ void *base; ++ ++ pagesize = getpagesize (); ++ alc = vec->size + size; ++ if (vec->size == 0) ++ alc = 16 * size; ++ else if (alc < pagesize) ++ { ++ alc *= 2; ++ if (alc > pagesize) ++ alc = pagesize; ++ } ++ else ++ { ++ alc *= 2; ++ alc = (alc + pagesize - 1) & ~ (pagesize - 1); ++ } ++ base = backtrace_alloc (state, alc, error_callback, data); ++ if (base == NULL) ++ return NULL; ++ if (vec->base != NULL) ++ { ++ memcpy (base, vec->base, vec->size); ++ backtrace_free (state, vec->base, vec->size + vec->alc, ++ error_callback, data); ++ } ++ vec->base = base; ++ vec->alc = alc - vec->size; ++ } ++ ++ ret = (char *) vec->base + vec->size; ++ vec->size += size; ++ vec->alc -= size; ++ return ret; ++} ++ ++/* Finish the current allocation on VEC. 
*/ ++ ++void * ++backtrace_vector_finish ( ++ struct backtrace_state *state ATTRIBUTE_UNUSED, ++ struct backtrace_vector *vec, ++ backtrace_error_callback error_callback ATTRIBUTE_UNUSED, ++ void *data ATTRIBUTE_UNUSED) ++{ ++ void *ret; ++ ++ ret = vec->base; ++ vec->base = (char *) vec->base + vec->size; ++ vec->size = 0; ++ return ret; ++} ++ ++/* Release any extra space allocated for VEC. */ ++ ++int ++backtrace_vector_release (struct backtrace_state *state, ++ struct backtrace_vector *vec, ++ backtrace_error_callback error_callback, ++ void *data) ++{ ++ size_t size; ++ size_t alc; ++ size_t aligned; ++ ++ /* Make sure that the block that we free is aligned on an 8-byte ++ boundary. */ ++ size = vec->size; ++ alc = vec->alc; ++ aligned = (size + 7) & ~ (size_t) 7; ++ alc -= aligned - size; ++ ++ backtrace_free (state, (char *) vec->base + aligned, alc, ++ error_callback, data); ++ vec->alc = 0; ++ return 1; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/mmapio.c index 000000000,000000000..8a9ba8e7b new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/mmapio.c @@@ -1,0 -1,0 +1,100 @@@ ++/* mmapio.c -- File views using mmap. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. 
++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++#include ++#include ++#include ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++#ifndef MAP_FAILED ++#define MAP_FAILED ((void *)-1) ++#endif ++ ++/* This file implements file views and memory allocation when mmap is ++ available. */ ++ ++/* Create a view of SIZE bytes from DESCRIPTOR at OFFSET. */ ++ ++int ++backtrace_get_view (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ int descriptor, off_t offset, size_t size, ++ backtrace_error_callback error_callback, ++ void *data, struct backtrace_view *view) ++{ ++ size_t pagesize; ++ unsigned int inpage; ++ off_t pageoff; ++ void *map; ++ ++ pagesize = getpagesize (); ++ inpage = offset % pagesize; ++ pageoff = offset - inpage; ++ ++ size += inpage; ++ size = (size + (pagesize - 1)) & ~ (pagesize - 1); ++ ++ map = mmap (NULL, size, PROT_READ, MAP_PRIVATE, descriptor, pageoff); ++ if (map == MAP_FAILED) ++ { ++ error_callback (data, "mmap", errno); ++ return 0; ++ } ++ ++ view->data = (char *) map + inpage; ++ view->base = map; ++ view->len = size; ++ ++ return 1; ++} ++ ++/* Release a view read by backtrace_get_view. 
*/ ++ ++void ++backtrace_release_view (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ struct backtrace_view *view, ++ backtrace_error_callback error_callback, ++ void *data) ++{ ++ union { ++ const void *cv; ++ void *v; ++ } const_cast; ++ ++ const_cast.cv = view->base; ++ if (munmap (const_cast.v, view->len) < 0) ++ error_callback (data, "munmap", errno); ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/nounwind.c index 000000000,000000000..0deaeef0a new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/nounwind.c @@@ -1,0 -1,0 +1,66 @@@ ++/* backtrace.c -- Entry point for stack backtrace library. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++ ++#include "backtrace.h" ++ ++#include "internal.h" ++ ++/* This source file is compiled if the unwind library is not ++ available. */ ++ ++int ++backtrace_full (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ int skip ATTRIBUTE_UNUSED, ++ backtrace_full_callback callback ATTRIBUTE_UNUSED, ++ backtrace_error_callback error_callback, void *data) ++{ ++ error_callback (data, ++ "no stack trace because unwind library not available", ++ 0); ++ return 0; ++} ++ ++int ++backtrace_simple (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ int skip ATTRIBUTE_UNUSED, ++ backtrace_simple_callback callback ATTRIBUTE_UNUSED, ++ backtrace_error_callback error_callback, void *data) ++{ ++ error_callback (data, ++ "no stack trace because unwind library not available", ++ 0); ++ return 0; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/pecoff.c index 000000000,000000000..c7d32aa6b new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/pecoff.c @@@ -1,0 -1,0 +1,937 @@@ ++/* pecoff.c -- Get debug data from a PE/COFFF file for backtraces. ++ Copyright (C) 2015-2016 Free Software Foundation, Inc. ++ Adapted from elf.c by Tristan Gingold, AdaCore. 
++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++#include ++#include ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++/* Coff file header. */ ++ ++typedef struct { ++ uint16_t machine; ++ uint16_t number_of_sections; ++ uint32_t time_date_stamp; ++ uint32_t pointer_to_symbol_table; ++ uint32_t number_of_symbols; ++ uint16_t size_of_optional_header; ++ uint16_t characteristics; ++} b_coff_file_header; ++ ++/* Coff optional header. 
*/ ++ ++typedef struct { ++ uint16_t magic; ++ uint8_t major_linker_version; ++ uint8_t minor_linker_version; ++ uint32_t size_of_code; ++ uint32_t size_of_initialized_data; ++ uint32_t size_of_uninitialized_data; ++ uint32_t address_of_entry_point; ++ uint32_t base_of_code; ++ union { ++ struct { ++ uint32_t base_of_data; ++ uint32_t image_base; ++ } pe; ++ struct { ++ uint64_t image_base; ++ } pep; ++ } u; ++} b_coff_optional_header; ++ ++/* Values of magic in optional header. */ ++ ++#define PE_MAGIC 0x10b /* PE32 executable. */ ++#define PEP_MAGIC 0x20b /* PE32+ executable (for 64bit targets). */ ++ ++/* Coff section header. */ ++ ++typedef struct { ++ char name[8]; ++ uint32_t virtual_size; ++ uint32_t virtual_address; ++ uint32_t size_of_raw_data; ++ uint32_t pointer_to_raw_data; ++ uint32_t pointer_to_relocations; ++ uint32_t pointer_to_line_numbers; ++ uint16_t number_of_relocations; ++ uint16_t number_of_line_numbers; ++ uint32_t characteristics; ++} b_coff_section_header; ++ ++/* Coff symbol name. */ ++ ++typedef union { ++ char short_name[8]; ++ struct { ++ unsigned char zeroes[4]; ++ unsigned char off[4]; ++ } long_name; ++} b_coff_name; ++ ++/* Coff symbol (external representation which is unaligned). */ ++ ++typedef struct { ++ b_coff_name name; ++ unsigned char value[4]; ++ unsigned char section_number[2]; ++ unsigned char type[2]; ++ unsigned char storage_class; ++ unsigned char number_of_aux_symbols; ++} b_coff_external_symbol; ++ ++/* Symbol types. */ ++ ++#define N_TBSHFT 4 /* Shift for the derived type. */ ++#define IMAGE_SYM_DTYPE_FUNCTION 2 /* Function derived type. */ ++ ++/* Size of a coff symbol. */ ++ ++#define SYM_SZ 18 ++ ++/* Coff symbol, internal representation (aligned). */ ++ ++typedef struct { ++ const char *name; ++ uint32_t value; ++ int16_t sec; ++ uint16_t type; ++ uint16_t sc; ++} b_coff_internal_symbol; ++ ++/* An index of sections we care about. 
*/ ++ ++enum debug_section ++{ ++ DEBUG_INFO, ++ DEBUG_LINE, ++ DEBUG_ABBREV, ++ DEBUG_RANGES, ++ DEBUG_STR, ++ DEBUG_MAX ++}; ++ ++/* Names of sections, indexed by enum debug_section. */ ++ ++static const char * const debug_section_names[DEBUG_MAX] = ++{ ++ ".debug_info", ++ ".debug_line", ++ ".debug_abbrev", ++ ".debug_ranges", ++ ".debug_str" ++}; ++ ++/* Information we gather for the sections we care about. */ ++ ++struct debug_section_info ++{ ++ /* Section file offset. */ ++ off_t offset; ++ /* Section size. */ ++ size_t size; ++ /* Section contents, after read from file. */ ++ const unsigned char *data; ++}; ++ ++/* Information we keep for an coff symbol. */ ++ ++struct coff_symbol ++{ ++ /* The name of the symbol. */ ++ const char *name; ++ /* The address of the symbol. */ ++ uintptr_t address; ++}; ++ ++/* Information to pass to coff_syminfo. */ ++ ++struct coff_syminfo_data ++{ ++ /* Symbols for the next module. */ ++ struct coff_syminfo_data *next; ++ /* The COFF symbols, sorted by address. */ ++ struct coff_symbol *symbols; ++ /* The number of symbols. */ ++ size_t count; ++}; ++ ++/* A dummy callback function used when we can't find any debug info. */ ++ ++static int ++coff_nodebug (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ uintptr_t pc ATTRIBUTE_UNUSED, ++ backtrace_full_callback callback ATTRIBUTE_UNUSED, ++ backtrace_error_callback error_callback, void *data) ++{ ++ error_callback (data, "no debug info in PE/COFF executable", -1); ++ return 0; ++} ++ ++/* A dummy callback function used when we can't find a symbol ++ table. */ ++ ++static void ++coff_nosyms (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ uintptr_t addr ATTRIBUTE_UNUSED, ++ backtrace_syminfo_callback callback ATTRIBUTE_UNUSED, ++ backtrace_error_callback error_callback, void *data) ++{ ++ error_callback (data, "no symbol table in PE/COFF executable", -1); ++} ++ ++/* Read a potentially unaligned 4 byte word at P, using native endianness. 
*/ ++ ++static uint32_t ++coff_read4 (const unsigned char *p) ++{ ++ uint32_t res; ++ ++ memcpy (&res, p, 4); ++ return res; ++} ++ ++/* Read a potentially unaligned 2 byte word at P, using native endianness. ++ All 2 byte word in symbols are always aligned, but for coherency all ++ fields are declared as char arrays. */ ++ ++static uint16_t ++coff_read2 (const unsigned char *p) ++{ ++ uint16_t res; ++ ++ memcpy (&res, p, sizeof (res)); ++ return res; ++} ++ ++/* Return the length (without the trailing 0) of a COFF short name. */ ++ ++static size_t ++coff_short_name_len (const char *name) ++{ ++ int i; ++ ++ for (i = 0; i < 8; i++) ++ if (name[i] == 0) ++ return i; ++ return 8; ++} ++ ++/* Return true iff COFF short name CNAME is the same as NAME (a NUL-terminated ++ string). */ ++ ++static int ++coff_short_name_eq (const char *name, const char *cname) ++{ ++ int i; ++ ++ for (i = 0; i < 8; i++) ++ { ++ if (name[i] != cname[i]) ++ return 0; ++ if (name[i] == 0) ++ return 1; ++ } ++ return name[8] == 0; ++} ++ ++/* Return true iff NAME is the same as string at offset OFF. */ ++ ++static int ++coff_long_name_eq (const char *name, unsigned int off, ++ struct backtrace_view *str_view) ++{ ++ if (off >= str_view->len) ++ return 0; ++ return strcmp (name, (const char *)str_view->data + off) == 0; ++} ++ ++/* Compare struct coff_symbol for qsort. */ ++ ++static int ++coff_symbol_compare (const void *v1, const void *v2) ++{ ++ const struct coff_symbol *e1 = (const struct coff_symbol *) v1; ++ const struct coff_symbol *e2 = (const struct coff_symbol *) v2; ++ ++ if (e1->address < e2->address) ++ return -1; ++ else if (e1->address > e2->address) ++ return 1; ++ else ++ return 0; ++} ++ ++/* Convert SYM to internal (and aligned) format ISYM, using string table ++ from STRTAB and STRTAB_SIZE, and number of sections SECTS_NUM. ++ Return -1 in case of error (invalid section number or string index). 
*/ ++ ++static int ++coff_expand_symbol (b_coff_internal_symbol *isym, ++ const b_coff_external_symbol *sym, ++ uint16_t sects_num, ++ const unsigned char *strtab, size_t strtab_size) ++{ ++ isym->type = coff_read2 (sym->type); ++ isym->sec = coff_read2 (sym->section_number); ++ isym->sc = sym->storage_class; ++ ++ if (isym->sec > 0 && (uint16_t) isym->sec > sects_num) ++ return -1; ++ if (sym->name.short_name[0] != 0) ++ isym->name = sym->name.short_name; ++ else ++ { ++ uint32_t off = coff_read4 (sym->name.long_name.off); ++ ++ if (off >= strtab_size) ++ return -1; ++ isym->name = (const char *) strtab + off; ++ } ++ return 0; ++} ++ ++/* Return true iff SYM is a defined symbol for a function. Data symbols ++ aren't considered because they aren't easily identified (same type as ++ section names, presence of symbols defined by the linker script). */ ++ ++static int ++coff_is_function_symbol (const b_coff_internal_symbol *isym) ++{ ++ return (isym->type >> N_TBSHFT) == IMAGE_SYM_DTYPE_FUNCTION ++ && isym->sec > 0; ++} ++ ++/* Initialize the symbol table info for coff_syminfo. */ ++ ++static int ++coff_initialize_syminfo (struct backtrace_state *state, ++ uintptr_t base_address, ++ const b_coff_section_header *sects, size_t sects_num, ++ const b_coff_external_symbol *syms, size_t syms_size, ++ const unsigned char *strtab, size_t strtab_size, ++ backtrace_error_callback error_callback, ++ void *data, struct coff_syminfo_data *sdata) ++{ ++ size_t syms_count; ++ char *coff_symstr; ++ size_t coff_symstr_len; ++ size_t coff_symbol_count; ++ size_t coff_symbol_size; ++ struct coff_symbol *coff_symbols; ++ struct coff_symbol *coff_sym; ++ char *coff_str; ++ size_t i; ++ ++ syms_count = syms_size / SYM_SZ; ++ ++ /* We only care about function symbols. Count them. Also count size of ++ strings for in-symbol names. 
*/ ++ coff_symbol_count = 0; ++ coff_symstr_len = 0; ++ for (i = 0; i < syms_count; ++i) ++ { ++ const b_coff_external_symbol *asym = &syms[i]; ++ b_coff_internal_symbol isym; ++ ++ if (coff_expand_symbol (&isym, asym, sects_num, strtab, strtab_size) < 0) ++ { ++ error_callback (data, "invalid section or offset in coff symbol", 0); ++ return 0; ++ } ++ if (coff_is_function_symbol (&isym)) ++ { ++ ++coff_symbol_count; ++ if (asym->name.short_name[0] != 0) ++ coff_symstr_len += coff_short_name_len (asym->name.short_name) + 1; ++ } ++ ++ i += asym->number_of_aux_symbols; ++ } ++ ++ coff_symbol_size = (coff_symbol_count + 1) * sizeof (struct coff_symbol); ++ coff_symbols = ((struct coff_symbol *) ++ backtrace_alloc (state, coff_symbol_size, error_callback, ++ data)); ++ if (coff_symbols == NULL) ++ return 0; ++ ++ /* Allocate memory for symbols strings. */ ++ if (coff_symstr_len > 0) ++ { ++ coff_symstr = ((char *) ++ backtrace_alloc (state, coff_symstr_len, error_callback, ++ data)); ++ if (coff_symstr == NULL) ++ { ++ backtrace_free (state, coff_symbols, coff_symbol_size, ++ error_callback, data); ++ return 0; ++ } ++ } ++ else ++ coff_symstr = NULL; ++ ++ /* Copy symbols. */ ++ coff_sym = coff_symbols; ++ coff_str = coff_symstr; ++ for (i = 0; i < syms_count; ++i) ++ { ++ const b_coff_external_symbol *asym = &syms[i]; ++ b_coff_internal_symbol isym; ++ ++ if (coff_expand_symbol (&isym, asym, sects_num, strtab, strtab_size)) ++ { ++ /* Should not fail, as it was already tested in the previous ++ loop. */ ++ abort (); ++ } ++ if (coff_is_function_symbol (&isym)) ++ { ++ const char *name; ++ int16_t secnum; ++ ++ if (asym->name.short_name[0] != 0) ++ { ++ size_t len = coff_short_name_len (isym.name); ++ name = coff_str; ++ memcpy (coff_str, isym.name, len); ++ coff_str[len] = 0; ++ coff_str += len + 1; ++ } ++ else ++ name = isym.name; ++ ++ /* Strip leading '_'. 
*/ ++ if (name[0] == '_') ++ name++; ++ ++ /* Symbol value is section relative, so we need to read the address ++ of its section. */ ++ secnum = coff_read2 (asym->section_number); ++ ++ coff_sym->name = name; ++ coff_sym->address = (coff_read4 (asym->value) ++ + sects[secnum - 1].virtual_address ++ + base_address); ++ coff_sym++; ++ } ++ ++ i += asym->number_of_aux_symbols; ++ } ++ ++ /* End of symbols marker. */ ++ coff_sym->name = NULL; ++ coff_sym->address = -1; ++ ++ backtrace_qsort (coff_symbols, coff_symbol_count, ++ sizeof (struct coff_symbol), coff_symbol_compare); ++ ++ sdata->next = NULL; ++ sdata->symbols = coff_symbols; ++ sdata->count = coff_symbol_count; ++ ++ return 1; ++} ++ ++/* Add EDATA to the list in STATE. */ ++ ++static void ++coff_add_syminfo_data (struct backtrace_state *state, ++ struct coff_syminfo_data *sdata) ++{ ++ if (!state->threaded) ++ { ++ struct coff_syminfo_data **pp; ++ ++ for (pp = (struct coff_syminfo_data **) (void *) &state->syminfo_data; ++ *pp != NULL; ++ pp = &(*pp)->next) ++ ; ++ *pp = sdata; ++ } ++ else ++ { ++ while (1) ++ { ++ struct coff_syminfo_data **pp; ++ ++ pp = (struct coff_syminfo_data **) (void *) &state->syminfo_data; ++ ++ while (1) ++ { ++ struct coff_syminfo_data *p; ++ ++ p = backtrace_atomic_load_pointer (pp); ++ ++ if (p == NULL) ++ break; ++ ++ pp = &p->next; ++ } ++ ++ if (__sync_bool_compare_and_swap (pp, NULL, sdata)) ++ break; ++ } ++ } ++} ++ ++/* Compare an ADDR against an elf_symbol for bsearch. We allocate one ++ extra entry in the array so that this can look safely at the next ++ entry. 
*/ ++ ++static int ++coff_symbol_search (const void *vkey, const void *ventry) ++{ ++ const uintptr_t *key = (const uintptr_t *) vkey; ++ const struct coff_symbol *entry = (const struct coff_symbol *) ventry; ++ uintptr_t addr; ++ ++ addr = *key; ++ if (addr < entry->address) ++ return -1; ++ else if (addr >= entry[1].address) ++ return 1; ++ else ++ return 0; ++} ++ ++/* Return the symbol name and value for an ADDR. */ ++ ++static void ++coff_syminfo (struct backtrace_state *state, uintptr_t addr, ++ backtrace_syminfo_callback callback, ++ backtrace_error_callback error_callback ATTRIBUTE_UNUSED, ++ void *data) ++{ ++ struct coff_syminfo_data *sdata; ++ struct coff_symbol *sym = NULL; ++ ++ if (!state->threaded) ++ { ++ for (sdata = (struct coff_syminfo_data *) state->syminfo_data; ++ sdata != NULL; ++ sdata = sdata->next) ++ { ++ sym = ((struct coff_symbol *) ++ bsearch (&addr, sdata->symbols, sdata->count, ++ sizeof (struct coff_symbol), coff_symbol_search)); ++ if (sym != NULL) ++ break; ++ } ++ } ++ else ++ { ++ struct coff_syminfo_data **pp; ++ ++ pp = (struct coff_syminfo_data **) (void *) &state->syminfo_data; ++ while (1) ++ { ++ sdata = backtrace_atomic_load_pointer (pp); ++ if (sdata == NULL) ++ break; ++ ++ sym = ((struct coff_symbol *) ++ bsearch (&addr, sdata->symbols, sdata->count, ++ sizeof (struct coff_symbol), coff_symbol_search)); ++ if (sym != NULL) ++ break; ++ ++ pp = &sdata->next; ++ } ++ } ++ ++ if (sym == NULL) ++ callback (data, addr, NULL, 0, 0); ++ else ++ callback (data, addr, sym->name, sym->address, 0); ++} ++ ++/* Add the backtrace data for one PE/COFF file. Returns 1 on success, ++ 0 on failure (in both cases descriptor is closed). 
*/ ++ ++static int ++coff_add (struct backtrace_state *state, int descriptor, ++ backtrace_error_callback error_callback, void *data, ++ fileline *fileline_fn, int *found_sym, int *found_dwarf) ++{ ++ struct backtrace_view fhdr_view; ++ off_t fhdr_off; ++ int magic_ok; ++ b_coff_file_header fhdr; ++ off_t opt_sects_off; ++ size_t opt_sects_size; ++ unsigned int sects_num; ++ struct backtrace_view sects_view; ++ int sects_view_valid; ++ const b_coff_optional_header *opt_hdr; ++ const b_coff_section_header *sects; ++ struct backtrace_view str_view; ++ int str_view_valid; ++ size_t str_size; ++ off_t str_off; ++ struct backtrace_view syms_view; ++ off_t syms_off; ++ size_t syms_size; ++ int syms_view_valid; ++ unsigned int syms_num; ++ unsigned int i; ++ struct debug_section_info sections[DEBUG_MAX]; ++ off_t min_offset; ++ off_t max_offset; ++ struct backtrace_view debug_view; ++ int debug_view_valid; ++ uintptr_t image_base; ++ ++ *found_sym = 0; ++ *found_dwarf = 0; ++ ++ sects_view_valid = 0; ++ syms_view_valid = 0; ++ str_view_valid = 0; ++ debug_view_valid = 0; ++ ++ /* Map the MS-DOS stub (if any) and extract file header offset. */ ++ if (!backtrace_get_view (state, descriptor, 0, 0x40, error_callback, ++ data, &fhdr_view)) ++ goto fail; ++ ++ { ++ const char *vptr = (const char *)fhdr_view.data; ++ ++ if (vptr[0] == 'M' && vptr[1] == 'Z') ++ memcpy (&fhdr_off, vptr + 0x3c, 4); ++ else ++ fhdr_off = 0; ++ } ++ ++ backtrace_release_view (state, &fhdr_view, error_callback, data); ++ ++ /* Map the coff file header. 
*/ ++ if (!backtrace_get_view (state, descriptor, fhdr_off, ++ sizeof (b_coff_file_header) + 4, ++ error_callback, data, &fhdr_view)) ++ goto fail; ++ ++ if (fhdr_off != 0) ++ { ++ const char *magic = (const char *) fhdr_view.data; ++ magic_ok = memcmp (magic, "PE\0", 4) == 0; ++ fhdr_off += 4; ++ ++ memcpy (&fhdr, fhdr_view.data + 4, sizeof fhdr); ++ } ++ else ++ { ++ memcpy (&fhdr, fhdr_view.data, sizeof fhdr); ++ /* TODO: test fhdr.machine for coff but non-PE platforms. */ ++ magic_ok = 0; ++ } ++ backtrace_release_view (state, &fhdr_view, error_callback, data); ++ ++ if (!magic_ok) ++ { ++ error_callback (data, "executable file is not COFF", 0); ++ goto fail; ++ } ++ ++ sects_num = fhdr.number_of_sections; ++ syms_num = fhdr.number_of_symbols; ++ ++ opt_sects_off = fhdr_off + sizeof (fhdr); ++ opt_sects_size = (fhdr.size_of_optional_header ++ + sects_num * sizeof (b_coff_section_header)); ++ ++ /* To translate PC to file/line when using DWARF, we need to find ++ the .debug_info and .debug_line sections. */ ++ ++ /* Read the optional header and the section headers. */ ++ ++ if (!backtrace_get_view (state, descriptor, opt_sects_off, opt_sects_size, ++ error_callback, data, §s_view)) ++ goto fail; ++ sects_view_valid = 1; ++ opt_hdr = (const b_coff_optional_header *) sects_view.data; ++ sects = (const b_coff_section_header *) ++ (sects_view.data + fhdr.size_of_optional_header); ++ ++ if (fhdr.size_of_optional_header > sizeof (*opt_hdr)) ++ { ++ if (opt_hdr->magic == PE_MAGIC) ++ image_base = opt_hdr->u.pe.image_base; ++ else if (opt_hdr->magic == PEP_MAGIC) ++ image_base = opt_hdr->u.pep.image_base; ++ else ++ { ++ error_callback (data, "bad magic in PE optional header", 0); ++ goto fail; ++ } ++ } ++ else ++ image_base = 0; ++ ++ /* Read the symbol table and the string table. */ ++ ++ if (fhdr.pointer_to_symbol_table == 0) ++ { ++ /* No symbol table, no string table. 
*/ ++ str_off = 0; ++ str_size = 0; ++ syms_num = 0; ++ syms_size = 0; ++ } ++ else ++ { ++ /* Symbol table is followed by the string table. The string table ++ starts with its length (on 4 bytes). ++ Map the symbol table and the length of the string table. */ ++ syms_off = fhdr.pointer_to_symbol_table; ++ syms_size = syms_num * SYM_SZ; ++ ++ if (!backtrace_get_view (state, descriptor, syms_off, syms_size + 4, ++ error_callback, data, &syms_view)) ++ goto fail; ++ syms_view_valid = 1; ++ ++ memcpy (&str_size, syms_view.data + syms_size, 4); ++ ++ str_off = syms_off + syms_size; ++ ++ if (str_size > 4) ++ { ++ /* Map string table (including the length word). */ ++ ++ if (!backtrace_get_view (state, descriptor, str_off, str_size, ++ error_callback, data, &str_view)) ++ goto fail; ++ str_view_valid = 1; ++ } ++ } ++ ++ memset (sections, 0, sizeof sections); ++ ++ /* Look for the symbol table. */ ++ for (i = 0; i < sects_num; ++i) ++ { ++ const b_coff_section_header *s = sects + i; ++ unsigned int str_off; ++ int j; ++ ++ if (s->name[0] == '/') ++ { ++ /* Extended section name. */ ++ str_off = atoi (s->name + 1); ++ } ++ else ++ str_off = 0; ++ ++ for (j = 0; j < (int) DEBUG_MAX; ++j) ++ { ++ const char *dbg_name = debug_section_names[j]; ++ int match; ++ ++ if (str_off != 0) ++ match = coff_long_name_eq (dbg_name, str_off, &str_view); ++ else ++ match = coff_short_name_eq (dbg_name, s->name); ++ if (match) ++ { ++ sections[j].offset = s->pointer_to_raw_data; ++ sections[j].size = s->virtual_size <= s->size_of_raw_data ? 
++ s->virtual_size : s->size_of_raw_data; ++ break; ++ } ++ } ++ } ++ ++ if (syms_num != 0) ++ { ++ struct coff_syminfo_data *sdata; ++ ++ sdata = ((struct coff_syminfo_data *) ++ backtrace_alloc (state, sizeof *sdata, error_callback, data)); ++ if (sdata == NULL) ++ goto fail; ++ ++ if (!coff_initialize_syminfo (state, image_base, ++ sects, sects_num, ++ syms_view.data, syms_size, ++ str_view.data, str_size, ++ error_callback, data, sdata)) ++ { ++ backtrace_free (state, sdata, sizeof *sdata, error_callback, data); ++ goto fail; ++ } ++ ++ *found_sym = 1; ++ ++ coff_add_syminfo_data (state, sdata); ++ } ++ ++ backtrace_release_view (state, §s_view, error_callback, data); ++ sects_view_valid = 0; ++ backtrace_release_view (state, &syms_view, error_callback, data); ++ syms_view_valid = 0; ++ ++ /* Read all the debug sections in a single view, since they are ++ probably adjacent in the file. We never release this view. */ ++ ++ min_offset = 0; ++ max_offset = 0; ++ for (i = 0; i < (int) DEBUG_MAX; ++i) ++ { ++ off_t end; ++ ++ if (sections[i].size == 0) ++ continue; ++ if (min_offset == 0 || sections[i].offset < min_offset) ++ min_offset = sections[i].offset; ++ end = sections[i].offset + sections[i].size; ++ if (end > max_offset) ++ max_offset = end; ++ } ++ if (min_offset == 0 || max_offset == 0) ++ { ++ if (!backtrace_close (descriptor, error_callback, data)) ++ goto fail; ++ *fileline_fn = coff_nodebug; ++ return 1; ++ } ++ ++ if (!backtrace_get_view (state, descriptor, min_offset, ++ max_offset - min_offset, ++ error_callback, data, &debug_view)) ++ goto fail; ++ debug_view_valid = 1; ++ ++ /* We've read all we need from the executable. 
*/ ++ if (!backtrace_close (descriptor, error_callback, data)) ++ goto fail; ++ descriptor = -1; ++ ++ for (i = 0; i < (int) DEBUG_MAX; ++i) ++ { ++ if (sections[i].size == 0) ++ sections[i].data = NULL; ++ else ++ sections[i].data = ((const unsigned char *) debug_view.data ++ + (sections[i].offset - min_offset)); ++ } ++ ++ if (!backtrace_dwarf_add (state, /* base_address */ 0, ++ sections[DEBUG_INFO].data, ++ sections[DEBUG_INFO].size, ++ sections[DEBUG_LINE].data, ++ sections[DEBUG_LINE].size, ++ sections[DEBUG_ABBREV].data, ++ sections[DEBUG_ABBREV].size, ++ sections[DEBUG_RANGES].data, ++ sections[DEBUG_RANGES].size, ++ sections[DEBUG_STR].data, ++ sections[DEBUG_STR].size, ++ 0, /* FIXME */ ++ error_callback, data, fileline_fn)) ++ goto fail; ++ ++ *found_dwarf = 1; ++ ++ return 1; ++ ++ fail: ++ if (sects_view_valid) ++ backtrace_release_view (state, §s_view, error_callback, data); ++ if (str_view_valid) ++ backtrace_release_view (state, &str_view, error_callback, data); ++ if (syms_view_valid) ++ backtrace_release_view (state, &syms_view, error_callback, data); ++ if (debug_view_valid) ++ backtrace_release_view (state, &debug_view, error_callback, data); ++ if (descriptor != -1) ++ backtrace_close (descriptor, error_callback, data); ++ return 0; ++} ++ ++/* Initialize the backtrace data we need from an ELF executable. At ++ the ELF level, all we need to do is find the debug info ++ sections. 
*/ ++ ++int ++backtrace_initialize (struct backtrace_state *state, int descriptor, ++ backtrace_error_callback error_callback, ++ void *data, fileline *fileline_fn) ++{ ++ int ret; ++ int found_sym; ++ int found_dwarf; ++ fileline coff_fileline_fn; ++ ++ ret = coff_add (state, descriptor, error_callback, data, ++ &coff_fileline_fn, &found_sym, &found_dwarf); ++ if (!ret) ++ return 0; ++ ++ if (!state->threaded) ++ { ++ if (found_sym) ++ state->syminfo_fn = coff_syminfo; ++ else if (state->syminfo_fn == NULL) ++ state->syminfo_fn = coff_nosyms; ++ } ++ else ++ { ++ if (found_sym) ++ backtrace_atomic_store_pointer (&state->syminfo_fn, coff_syminfo); ++ else ++ __sync_bool_compare_and_swap (&state->syminfo_fn, NULL, coff_nosyms); ++ } ++ ++ if (!state->threaded) ++ { ++ if (state->fileline_fn == NULL || state->fileline_fn == coff_nodebug) ++ *fileline_fn = coff_fileline_fn; ++ } ++ else ++ { ++ fileline current_fn; ++ ++ current_fn = backtrace_atomic_load_pointer (&state->fileline_fn); ++ if (current_fn == NULL || current_fn == coff_nodebug) ++ *fileline_fn = coff_fileline_fn; ++ } ++ ++ return 1; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/posix.c index 000000000,000000000..be7357e6b new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/posix.c @@@ -1,0 -1,0 +1,100 @@@ ++/* posix.c -- POSIX file I/O routines for the backtrace library. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. 
++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++#include ++#include ++#include ++#include ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++#ifndef O_BINARY ++#define O_BINARY 0 ++#endif ++ ++#ifndef O_CLOEXEC ++#define O_CLOEXEC 0 ++#endif ++ ++#ifndef FD_CLOEXEC ++#define FD_CLOEXEC 1 ++#endif ++ ++/* Open a file for reading. */ ++ ++int ++backtrace_open (const char *filename, backtrace_error_callback error_callback, ++ void *data, int *does_not_exist) ++{ ++ int descriptor; ++ ++ if (does_not_exist != NULL) ++ *does_not_exist = 0; ++ ++ descriptor = open (filename, (int) (O_RDONLY | O_BINARY | O_CLOEXEC)); ++ if (descriptor < 0) ++ { ++ if (does_not_exist != NULL && errno == ENOENT) ++ *does_not_exist = 1; ++ else ++ error_callback (data, filename, errno); ++ return -1; ++ } ++ ++#ifdef HAVE_FCNTL ++ /* Set FD_CLOEXEC just in case the kernel does not support ++ O_CLOEXEC. 
It doesn't matter if this fails for some reason. ++ FIXME: At some point it should be safe to only do this if ++ O_CLOEXEC == 0. */ ++ fcntl (descriptor, F_SETFD, FD_CLOEXEC); ++#endif ++ ++ return descriptor; ++} ++ ++/* Close DESCRIPTOR. */ ++ ++int ++backtrace_close (int descriptor, backtrace_error_callback error_callback, ++ void *data) ++{ ++ if (close (descriptor) < 0) ++ { ++ error_callback (data, "close", errno); ++ return 0; ++ } ++ return 1; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/print.c index 000000000,000000000..73b8abc19 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/print.c @@@ -1,0 -1,0 +1,92 @@@ ++/* print.c -- Print the current backtrace. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++#include ++#include ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++/* Passed to callbacks. */ ++ ++struct print_data ++{ ++ struct backtrace_state *state; ++ FILE *f; ++}; ++ ++/* Print one level of a backtrace. */ ++ ++static int ++print_callback (void *data, uintptr_t pc, const char *filename, int lineno, ++ const char *function) ++{ ++ struct print_data *pdata = (struct print_data *) data; ++ ++ fprintf (pdata->f, "0x%lx %s\n\t%s:%d\n", ++ (unsigned long) pc, ++ function == NULL ? "???" : function, ++ filename == NULL ? "???" : filename, ++ lineno); ++ return 0; ++} ++ ++/* Print errors to stderr. */ ++ ++static void ++error_callback (void *data, const char *msg, int errnum) ++{ ++ struct print_data *pdata = (struct print_data *) data; ++ ++ if (pdata->state->filename != NULL) ++ fprintf (stderr, "%s: ", pdata->state->filename); ++ fprintf (stderr, "libbacktrace: %s", msg); ++ if (errnum > 0) ++ fprintf (stderr, ": %s", strerror (errnum)); ++ fputc ('\n', stderr); ++} ++ ++/* Print a backtrace. 
*/ ++ ++void ++backtrace_print (struct backtrace_state *state, int skip, FILE *f) ++{ ++ struct print_data data; ++ ++ data.state = state; ++ data.f = f; ++ backtrace_full (state, skip + 1, print_callback, error_callback, ++ (void *) &data); ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/read.c index 000000000,000000000..33b68f843 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/read.c @@@ -1,0 -1,0 +1,96 @@@ ++/* read.c -- File views without mmap. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. 
*/ ++ ++#include "config.h" ++ ++#include ++#include ++#include ++#include ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++/* This file implements file views when mmap is not available. */ ++ ++/* Create a view of SIZE bytes from DESCRIPTOR at OFFSET. */ ++ ++int ++backtrace_get_view (struct backtrace_state *state, int descriptor, ++ off_t offset, size_t size, ++ backtrace_error_callback error_callback, ++ void *data, struct backtrace_view *view) ++{ ++ ssize_t got; ++ ++ if (lseek (descriptor, offset, SEEK_SET) < 0) ++ { ++ error_callback (data, "lseek", errno); ++ return 0; ++ } ++ ++ view->base = backtrace_alloc (state, size, error_callback, data); ++ if (view->base == NULL) ++ return 0; ++ view->data = view->base; ++ view->len = size; ++ ++ got = read (descriptor, view->base, size); ++ if (got < 0) ++ { ++ error_callback (data, "read", errno); ++ free (view->base); ++ return 0; ++ } ++ ++ if ((size_t) got < size) ++ { ++ error_callback (data, "file too short", 0); ++ free (view->base); ++ return 0; ++ } ++ ++ return 1; ++} ++ ++/* Release a view read by backtrace_get_view. */ ++ ++void ++backtrace_release_view (struct backtrace_state *state, ++ struct backtrace_view *view, ++ backtrace_error_callback error_callback, ++ void *data) ++{ ++ backtrace_free (state, view->base, view->len, error_callback, data); ++ view->data = NULL; ++ view->base = NULL; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/simple.c index 000000000,000000000..493fd6de7 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/simple.c @@@ -1,0 -1,0 +1,108 @@@ ++/* simple.c -- The backtrace_simple function. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. 
++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include "unwind.h" ++#include "backtrace.h" ++ ++/* The simple_backtrace routine. */ ++ ++/* Data passed through _Unwind_Backtrace. */ ++ ++struct backtrace_simple_data ++{ ++ /* Number of frames to skip. */ ++ int skip; ++ /* Library state. */ ++ struct backtrace_state *state; ++ /* Callback routine. */ ++ backtrace_simple_callback callback; ++ /* Error callback routine. */ ++ backtrace_error_callback error_callback; ++ /* Data to pass to callback routine. */ ++ void *data; ++ /* Value to return from backtrace. 
*/ ++ int ret; ++}; ++ ++/* Unwind library callback routine. This is passd to ++ _Unwind_Backtrace. */ ++ ++static _Unwind_Reason_Code ++simple_unwind (struct _Unwind_Context *context, void *vdata) ++{ ++ struct backtrace_simple_data *bdata = (struct backtrace_simple_data *) vdata; ++ uintptr_t pc; ++ int ip_before_insn = 0; ++ ++#ifdef HAVE_GETIPINFO ++ pc = _Unwind_GetIPInfo (context, &ip_before_insn); ++#else ++ pc = _Unwind_GetIP (context); ++#endif ++ ++ if (bdata->skip > 0) ++ { ++ --bdata->skip; ++ return _URC_NO_REASON; ++ } ++ ++ if (!ip_before_insn) ++ --pc; ++ ++ bdata->ret = bdata->callback (bdata->data, pc); ++ ++ if (bdata->ret != 0) ++ return _URC_END_OF_STACK; ++ ++ return _URC_NO_REASON; ++} ++ ++/* Get a simple stack backtrace. */ ++ ++int ++backtrace_simple (struct backtrace_state *state, int skip, ++ backtrace_simple_callback callback, ++ backtrace_error_callback error_callback, void *data) ++{ ++ struct backtrace_simple_data bdata; ++ ++ bdata.skip = skip + 1; ++ bdata.state = state; ++ bdata.callback = callback; ++ bdata.error_callback = error_callback; ++ bdata.data = data; ++ bdata.ret = 0; ++ _Unwind_Backtrace (simple_unwind, &bdata); ++ return bdata.ret; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/sort.c index 000000000,000000000..f352fca5e new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/sort.c @@@ -1,0 -1,0 +1,108 @@@ ++/* sort.c -- Sort without allocating memory ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. 
++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++#include ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++/* The GNU glibc version of qsort allocates memory, which we must not ++ do if we are invoked by a signal handler. So provide our own ++ sort. */ ++ ++static void ++swap (char *a, char *b, size_t size) ++{ ++ size_t i; ++ ++ for (i = 0; i < size; i++, a++, b++) ++ { ++ char t; ++ ++ t = *a; ++ *a = *b; ++ *b = t; ++ } ++} ++ ++void ++backtrace_qsort (void *basearg, size_t count, size_t size, ++ int (*compar) (const void *, const void *)) ++{ ++ char *base = (char *) basearg; ++ size_t i; ++ size_t mid; ++ ++ tail_recurse: ++ if (count < 2) ++ return; ++ ++ /* The symbol table and DWARF tables, which is all we use this ++ routine for, tend to be roughly sorted. 
Pick the middle element ++ in the array as our pivot point, so that we are more likely to ++ cut the array in half for each recursion step. */ ++ swap (base, base + (count / 2) * size, size); ++ ++ mid = 0; ++ for (i = 1; i < count; i++) ++ { ++ if ((*compar) (base, base + i * size) > 0) ++ { ++ ++mid; ++ if (i != mid) ++ swap (base + mid * size, base + i * size, size); ++ } ++ } ++ ++ if (mid > 0) ++ swap (base, base + mid * size, size); ++ ++ /* Recurse with the smaller array, loop with the larger one. That ++ ensures that our maximum stack depth is log count. */ ++ if (2 * mid < count) ++ { ++ backtrace_qsort (base, mid, size, compar); ++ base += (mid + 1) * size; ++ count -= mid + 1; ++ goto tail_recurse; ++ } ++ else ++ { ++ backtrace_qsort (base + (mid + 1) * size, count - (mid + 1), ++ size, compar); ++ count = mid; ++ goto tail_recurse; ++ } ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/state.c index 000000000,000000000..361a3963c new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/state.c @@@ -1,0 -1,0 +1,72 @@@ ++/* state.c -- Create the backtrace state. ++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. 
++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++#include ++ ++#include "backtrace.h" ++#include "backtrace-supported.h" ++#include "internal.h" ++ ++/* Create the backtrace state. This will then be passed to all the ++ other routines. */ ++ ++struct backtrace_state * ++backtrace_create_state (const char *filename, int threaded, ++ backtrace_error_callback error_callback, ++ void *data) ++{ ++ struct backtrace_state init_state; ++ struct backtrace_state *state; ++ ++#ifndef HAVE_SYNC_FUNCTIONS ++ if (threaded) ++ { ++ error_callback (data, "backtrace library does not support threads", 0); ++ return NULL; ++ } ++#endif ++ ++ memset (&init_state, 0, sizeof init_state); ++ init_state.filename = filename; ++ init_state.threaded = threaded; ++ ++ state = ((struct backtrace_state *) ++ backtrace_alloc (&init_state, sizeof *state, error_callback, data)); ++ if (state == NULL) ++ return NULL; ++ *state = init_state; ++ ++ return state; ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/stest.c index 000000000,000000000..7a0b06d3e new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/stest.c @@@ -1,0 -1,0 +1,137 @@@ ++/* stest.c -- Test for libbacktrace internal sort function ++ Copyright (C) 2012-2016 Free Software 
Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++#include ++#include ++#include ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++/* Test the local qsort implementation. 
*/ ++ ++#define MAX 10 ++ ++struct test ++{ ++ size_t count; ++ int input[MAX]; ++ int output[MAX]; ++}; ++ ++static struct test tests[] = ++ { ++ { ++ 10, ++ { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, ++ { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 } ++ }, ++ { ++ 9, ++ { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, ++ { 1, 2, 3, 4, 5, 6, 7, 8, 9 } ++ }, ++ { ++ 10, ++ { 10, 9, 8, 7, 6, 5, 4, 3, 2, 1 }, ++ { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, ++ }, ++ { ++ 9, ++ { 9, 8, 7, 6, 5, 4, 3, 2, 1 }, ++ { 1, 2, 3, 4, 5, 6, 7, 8, 9 }, ++ }, ++ { ++ 10, ++ { 2, 4, 6, 8, 10, 1, 3, 5, 7, 9 }, ++ { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, ++ }, ++ { ++ 5, ++ { 4, 5, 3, 1, 2 }, ++ { 1, 2, 3, 4, 5 }, ++ }, ++ { ++ 5, ++ { 1, 1, 1, 1, 1 }, ++ { 1, 1, 1, 1, 1 }, ++ }, ++ { ++ 5, ++ { 1, 1, 2, 1, 1 }, ++ { 1, 1, 1, 1, 2 }, ++ }, ++ { ++ 5, ++ { 2, 1, 1, 1, 1 }, ++ { 1, 1, 1, 1, 2 }, ++ }, ++ }; ++ ++static int ++compare (const void *a, const void *b) ++{ ++ const int *ai = (const int *) a; ++ const int *bi = (const int *) b; ++ ++ return *ai - *bi; ++} ++ ++int ++main (int argc ATTRIBUTE_UNUSED, char **argv ATTRIBUTE_UNUSED) ++{ ++ int failures; ++ size_t i; ++ int a[MAX]; ++ ++ failures = 0; ++ for (i = 0; i < sizeof tests / sizeof tests[0]; i++) ++ { ++ memcpy (a, tests[i].input, tests[i].count * sizeof (int)); ++ backtrace_qsort (a, tests[i].count, sizeof (int), compare); ++ if (memcmp (a, tests[i].output, tests[i].count * sizeof (int)) != 0) ++ { ++ size_t j; ++ ++ fprintf (stderr, "test %d failed:", (int) i); ++ for (j = 0; j < tests[i].count; j++) ++ fprintf (stderr, " %d", a[j]); ++ fprintf (stderr, "\n"); ++ ++failures; ++ } ++ } ++ ++ exit (failures > 0 ? EXIT_FAILURE : EXIT_SUCCESS); ++} diff --cc vendor/backtrace-sys-0.1.12/src/libbacktrace/unknown.c index 000000000,000000000..8afe65b3f new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/src/libbacktrace/unknown.c @@@ -1,0 -1,0 +1,64 @@@ ++/* unknown.c -- used when backtrace configury does not know file format. 
++ Copyright (C) 2012-2016 Free Software Foundation, Inc. ++ Written by Ian Lance Taylor, Google. ++ ++Redistribution and use in source and binary forms, with or without ++modification, are permitted provided that the following conditions are ++met: ++ ++ (1) Redistributions of source code must retain the above copyright ++ notice, this list of conditions and the following disclaimer. ++ ++ (2) Redistributions in binary form must reproduce the above copyright ++ notice, this list of conditions and the following disclaimer in ++ the documentation and/or other materials provided with the ++ distribution. ++ ++ (3) The name of the author may not be used to ++ endorse or promote products derived from this software without ++ specific prior written permission. ++ ++THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR ++IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ++WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE ++DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, ++INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES ++(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR ++SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ++HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, ++STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING ++IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE ++POSSIBILITY OF SUCH DAMAGE. */ ++ ++#include "config.h" ++ ++#include ++ ++#include "backtrace.h" ++#include "internal.h" ++ ++/* A trivial routine that always fails to find fileline data. 
*/ ++ ++static int ++unknown_fileline (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ uintptr_t pc, backtrace_full_callback callback, ++ backtrace_error_callback error_callback ATTRIBUTE_UNUSED, ++ void *data) ++ ++{ ++ return callback (data, pc, NULL, 0, NULL); ++} ++ ++/* Initialize the backtrace data when we don't know how to read the ++ debug info. */ ++ ++int ++backtrace_initialize (struct backtrace_state *state ATTRIBUTE_UNUSED, ++ int descriptor ATTRIBUTE_UNUSED, ++ backtrace_error_callback error_callback ATTRIBUTE_UNUSED, ++ void *data ATTRIBUTE_UNUSED, fileline *fileline_fn) ++{ ++ state->fileline_data = NULL; ++ *fileline_fn = unknown_fileline; ++ return 1; ++} diff --cc vendor/backtrace-sys-0.1.12/symbol-map index 000000000,000000000..fbadc7369 new file mode 100644 --- /dev/null +++ b/vendor/backtrace-sys-0.1.12/symbol-map @@@ -1,0 -1,0 +1,18 @@@ ++backtrace_full __rbt_backtrace_full ++backtrace_dwarf_add __rbt_backtrace_dwarf_add ++backtrace_initialize __rbt_backtrace_initialize ++backtrace_pcinfo __rbt_backtrace_pcinfo ++backtrace_syminfo __rbt_backtrace_syminfo ++backtrace_get_view __rbt_backtrace_get_view ++backtrace_release_view __rbt_backtrace_release_view ++backtrace_alloc __rbt_backtrace_alloc ++backtrace_free __rbt_backtrace_free ++backtrace_vector_finish __rbt_backtrace_vector_finish ++backtrace_vector_grow __rbt_backtrace_vector_grow ++backtrace_vector_release __rbt_backtrace_vector_release ++backtrace_close __rbt_backtrace_close ++backtrace_open __rbt_backtrace_open ++backtrace_print __rbt_backtrace_print ++backtrace_simple __rbt_backtrace_simple ++backtrace_qsort __rbt_backtrace_qsort ++backtrace_create_state __rbt_backtrace_create_state diff --cc vendor/bitflags-0.7.0/.cargo-checksum.json index 000000000,000000000..16b5f3f0f new file mode 100644 --- /dev/null +++ b/vendor/bitflags-0.7.0/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"} diff --cc 
vendor/bitflags-0.7.0/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/bitflags-0.7.0/.cargo-ok diff --cc vendor/bitflags-0.7.0/.travis.yml index 000000000,000000000..60344466a new file mode 100644 --- /dev/null +++ b/vendor/bitflags-0.7.0/.travis.yml @@@ -1,0 -1,0 +1,24 @@@ ++language: rust ++rust: ++ - stable ++ - beta ++ - nightly ++sudo: false ++script: ++ - cargo build --verbose ++ - cargo test --verbose ++ - cargo doc ++after_success: | ++ [ $TRAVIS_BRANCH = master ] && ++ [ $TRAVIS_PULL_REQUEST = false ] && ++ [ $TRAVIS_RUST_VERSION = nightly ] && ++ echo '' > target/doc/index.html && ++ pip install ghp-import --user $USER && ++ $HOME/.local/bin/ghp-import -n target/doc && ++ git push -qf https://${TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages ++env: ++ global: ++ secure: d+l63TtlF6cfFVDGauYRexgx4lBww4ORqqK4Vt75nWbiCbjZYsKXbcTUdhAr193nIVGiNW50A8SekM01F3EngHwHwr6u5kFleOggm+HA0kkBVeX+k2A4WCVVfYI+gth+zk99WaF8h46MA0evhx6FYDoqeyl9oqmVifI4kaqhMwc= ++notifications: ++ email: ++ on_success: never diff --cc vendor/bitflags-0.7.0/Cargo.toml index 000000000,000000000..042497e9c new file mode 100644 --- /dev/null +++ b/vendor/bitflags-0.7.0/Cargo.toml @@@ -1,0 -1,0 +1,13 @@@ ++[package] ++ ++name = "bitflags" ++version = "0.7.0" ++authors = ["The Rust Project Developers"] ++license = "MIT/Apache-2.0" ++readme = "README.md" ++repository = "https://github.com/rust-lang/bitflags" ++homepage = "https://github.com/rust-lang/bitflags" ++documentation = "https://doc.rust-lang.org/bitflags" ++description = """ ++A macro to generate structures which behave like bitflags. ++""" diff --cc vendor/bitflags-0.7.0/LICENSE-APACHE index 000000000,000000000..16fe87b06 new file mode 100644 --- /dev/null +++ b/vendor/bitflags-0.7.0/LICENSE-APACHE @@@ -1,0 -1,0 +1,201 @@@ ++ Apache License ++ Version 2.0, January 2004 ++ http://www.apache.org/licenses/ ++ ++TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION ++ ++1. Definitions. 
++ ++ "License" shall mean the terms and conditions for use, reproduction, ++ and distribution as defined by Sections 1 through 9 of this document. ++ ++ "Licensor" shall mean the copyright owner or entity authorized by ++ the copyright owner that is granting the License. ++ ++ "Legal Entity" shall mean the union of the acting entity and all ++ other entities that control, are controlled by, or are under common ++ control with that entity. For the purposes of this definition, ++ "control" means (i) the power, direct or indirect, to cause the ++ direction or management of such entity, whether by contract or ++ otherwise, or (ii) ownership of fifty percent (50%) or more of the ++ outstanding shares, or (iii) beneficial ownership of such entity. ++ ++ "You" (or "Your") shall mean an individual or Legal Entity ++ exercising permissions granted by this License. ++ ++ "Source" form shall mean the preferred form for making modifications, ++ including but not limited to software source code, documentation ++ source, and configuration files. ++ ++ "Object" form shall mean any form resulting from mechanical ++ transformation or translation of a Source form, including but ++ not limited to compiled object code, generated documentation, ++ and conversions to other media types. ++ ++ "Work" shall mean the work of authorship, whether in Source or ++ Object form, made available under the License, as indicated by a ++ copyright notice that is included in or attached to the work ++ (an example is provided in the Appendix below). ++ ++ "Derivative Works" shall mean any work, whether in Source or Object ++ form, that is based on (or derived from) the Work and for which the ++ editorial revisions, annotations, elaborations, or other modifications ++ represent, as a whole, an original work of authorship. 
For the purposes ++ of this License, Derivative Works shall not include works that remain ++ separable from, or merely link (or bind by name) to the interfaces of, ++ the Work and Derivative Works thereof. ++ ++ "Contribution" shall mean any work of authorship, including ++ the original version of the Work and any modifications or additions ++ to that Work or Derivative Works thereof, that is intentionally ++ submitted to Licensor for inclusion in the Work by the copyright owner ++ or by an individual or Legal Entity authorized to submit on behalf of ++ the copyright owner. For the purposes of this definition, "submitted" ++ means any form of electronic, verbal, or written communication sent ++ to the Licensor or its representatives, including but not limited to ++ communication on electronic mailing lists, source code control systems, ++ and issue tracking systems that are managed by, or on behalf of, the ++ Licensor for the purpose of discussing and improving the Work, but ++ excluding communication that is conspicuously marked or otherwise ++ designated in writing by the copyright owner as "Not a Contribution." ++ ++ "Contributor" shall mean Licensor and any individual or Legal Entity ++ on behalf of whom a Contribution has been received by Licensor and ++ subsequently incorporated within the Work. ++ ++2. Grant of Copyright License. Subject to the terms and conditions of ++ this License, each Contributor hereby grants to You a perpetual, ++ worldwide, non-exclusive, no-charge, royalty-free, irrevocable ++ copyright license to reproduce, prepare Derivative Works of, ++ publicly display, publicly perform, sublicense, and distribute the ++ Work and such Derivative Works in Source or Object form. ++ ++3. Grant of Patent License. 
Subject to the terms and conditions of ++ this License, each Contributor hereby grants to You a perpetual, ++ worldwide, non-exclusive, no-charge, royalty-free, irrevocable ++ (except as stated in this section) patent license to make, have made, ++ use, offer to sell, sell, import, and otherwise transfer the Work, ++ where such license applies only to those patent claims licensable ++ by such Contributor that are necessarily infringed by their ++ Contribution(s) alone or by combination of their Contribution(s) ++ with the Work to which such Contribution(s) was submitted. If You ++ institute patent litigation against any entity (including a ++ cross-claim or counterclaim in a lawsuit) alleging that the Work ++ or a Contribution incorporated within the Work constitutes direct ++ or contributory patent infringement, then any patent licenses ++ granted to You under this License for that Work shall terminate ++ as of the date such litigation is filed. ++ ++4. Redistribution. You may reproduce and distribute copies of the ++ Work or Derivative Works thereof in any medium, with or without ++ modifications, and in Source or Object form, provided that You ++ meet the following conditions: ++ ++ (a) You must give any other recipients of the Work or ++ Derivative Works a copy of this License; and ++ ++ (b) You must cause any modified files to carry prominent notices ++ stating that You changed the files; and ++ ++ (c) You must retain, in the Source form of any Derivative Works ++ that You distribute, all copyright, patent, trademark, and ++ attribution notices from the Source form of the Work, ++ excluding those notices that do not pertain to any part of ++ the Derivative Works; and ++ ++ (d) If the Work includes a "NOTICE" text file as part of its ++ distribution, then any Derivative Works that You distribute must ++ include a readable copy of the attribution notices contained ++ within such NOTICE file, excluding those notices that do not ++ pertain to any part of the 
Derivative Works, in at least one ++ of the following places: within a NOTICE text file distributed ++ as part of the Derivative Works; within the Source form or ++ documentation, if provided along with the Derivative Works; or, ++ within a display generated by the Derivative Works, if and ++ wherever such third-party notices normally appear. The contents ++ of the NOTICE file are for informational purposes only and ++ do not modify the License. You may add Your own attribution ++ notices within Derivative Works that You distribute, alongside ++ or as an addendum to the NOTICE text from the Work, provided ++ that such additional attribution notices cannot be construed ++ as modifying the License. ++ ++ You may add Your own copyright statement to Your modifications and ++ may provide additional or different license terms and conditions ++ for use, reproduction, or distribution of Your modifications, or ++ for any such Derivative Works as a whole, provided Your use, ++ reproduction, and distribution of the Work otherwise complies with ++ the conditions stated in this License. ++ ++5. Submission of Contributions. Unless You explicitly state otherwise, ++ any Contribution intentionally submitted for inclusion in the Work ++ by You to the Licensor shall be under the terms and conditions of ++ this License, without any additional terms or conditions. ++ Notwithstanding the above, nothing herein shall supersede or modify ++ the terms of any separate license agreement you may have executed ++ with Licensor regarding such Contributions. ++ ++6. Trademarks. This License does not grant permission to use the trade ++ names, trademarks, service marks, or product names of the Licensor, ++ except as required for reasonable and customary use in describing the ++ origin of the Work and reproducing the content of the NOTICE file. ++ ++7. Disclaimer of Warranty. 
Unless required by applicable law or ++ agreed to in writing, Licensor provides the Work (and each ++ Contributor provides its Contributions) on an "AS IS" BASIS, ++ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or ++ implied, including, without limitation, any warranties or conditions ++ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A ++ PARTICULAR PURPOSE. You are solely responsible for determining the ++ appropriateness of using or redistributing the Work and assume any ++ risks associated with Your exercise of permissions under this License. ++ ++8. Limitation of Liability. In no event and under no legal theory, ++ whether in tort (including negligence), contract, or otherwise, ++ unless required by applicable law (such as deliberate and grossly ++ negligent acts) or agreed to in writing, shall any Contributor be ++ liable to You for damages, including any direct, indirect, special, ++ incidental, or consequential damages of any character arising as a ++ result of this License or out of the use or inability to use the ++ Work (including but not limited to damages for loss of goodwill, ++ work stoppage, computer failure or malfunction, or any and all ++ other commercial damages or losses), even if such Contributor ++ has been advised of the possibility of such damages. ++ ++9. Accepting Warranty or Additional Liability. While redistributing ++ the Work or Derivative Works thereof, You may choose to offer, ++ and charge a fee for, acceptance of support, warranty, indemnity, ++ or other liability obligations and/or rights consistent with this ++ License. 
However, in accepting such obligations, You may act only ++ on Your own behalf and on Your sole responsibility, not on behalf ++ of any other Contributor, and only if You agree to indemnify, ++ defend, and hold each Contributor harmless for any liability ++ incurred by, or claims asserted against, such Contributor by reason ++ of your accepting any such warranty or additional liability. ++ ++END OF TERMS AND CONDITIONS ++ ++APPENDIX: How to apply the Apache License to your work. ++ ++ To apply the Apache License to your work, attach the following ++ boilerplate notice, with the fields enclosed by brackets "[]" ++ replaced with your own identifying information. (Don't include ++ the brackets!) The text should be enclosed in the appropriate ++ comment syntax for the file format. We also recommend that a ++ file or class name and description of purpose be included on the ++ same "printed page" as the copyright notice for easier ++ identification within third-party archives. ++ ++Copyright [yyyy] [name of copyright owner] ++ ++Licensed under the Apache License, Version 2.0 (the "License"); ++you may not use this file except in compliance with the License. ++You may obtain a copy of the License at ++ ++ http://www.apache.org/licenses/LICENSE-2.0 ++ ++Unless required by applicable law or agreed to in writing, software ++distributed under the License is distributed on an "AS IS" BASIS, ++WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++See the License for the specific language governing permissions and ++limitations under the License. 
diff --cc vendor/bitflags-0.7.0/LICENSE-MIT index 000000000,000000000..39d4bdb5a new file mode 100644 --- /dev/null +++ b/vendor/bitflags-0.7.0/LICENSE-MIT @@@ -1,0 -1,0 +1,25 @@@ ++Copyright (c) 2014 The Rust Project Developers ++ ++Permission is hereby granted, free of charge, to any ++person obtaining a copy of this software and associated ++documentation files (the "Software"), to deal in the ++Software without restriction, including without ++limitation the rights to use, copy, modify, merge, ++publish, distribute, sublicense, and/or sell copies of ++the Software, and to permit persons to whom the Software ++is furnished to do so, subject to the following ++conditions: ++ ++The above copyright notice and this permission notice ++shall be included in all copies or substantial portions ++of the Software. ++ ++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ++ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED ++TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A ++PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT ++SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY ++CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION ++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR ++IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER ++DEALINGS IN THE SOFTWARE. 
diff --cc vendor/bitflags-0.7.0/README.md index 000000000,000000000..3edd8a361 new file mode 100644 --- /dev/null +++ b/vendor/bitflags-0.7.0/README.md @@@ -1,0 -1,0 +1,24 @@@ ++bitflags ++======== ++ ++A Rust macro to generate structures which behave like a set of bitflags ++ ++[![Build Status](https://travis-ci.org/rust-lang-nursery/bitflags.svg?branch=master)](https://travis-ci.org/rust-lang-nursery/bitflags) ++ ++[Documentation](https://doc.rust-lang.org/bitflags) ++ ++## Usage ++ ++Add this to your `Cargo.toml`: ++ ++```toml ++[dependencies] ++bitflags = "0.6" ++``` ++ ++and this to your crate root: ++ ++```rust ++#[macro_use] ++extern crate bitflags; ++``` diff --cc vendor/bitflags-0.7.0/src/lib.rs index 000000000,000000000..698799dab new file mode 100644 --- /dev/null +++ b/vendor/bitflags-0.7.0/src/lib.rs @@@ -1,0 -1,0 +1,808 @@@ ++// Copyright 2014 The Rust Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution and at ++// http://rust-lang.org/COPYRIGHT. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! A typesafe bitmask flag generator. ++ ++#![no_std] ++ ++#[cfg(test)] ++#[macro_use] ++extern crate std; ++ ++// Re-export libstd/libcore using an alias so that the macros can work in no_std ++// crates while remaining compatible with normal crates. ++#[allow(private_in_public)] ++#[doc(hidden)] ++pub use core as __core; ++ ++/// The `bitflags!` macro generates a `struct` that holds a set of C-style ++/// bitmask flags. It is useful for creating typesafe wrappers for C APIs. ++/// ++/// The flags should only be defined for integer types, otherwise unexpected ++/// type errors may occur at compile time. ++/// ++/// # Example ++/// ++/// ```{.rust} ++/// #[macro_use] ++/// extern crate bitflags; ++/// ++/// bitflags! 
{ ++/// flags Flags: u32 { ++/// const FLAG_A = 0b00000001, ++/// const FLAG_B = 0b00000010, ++/// const FLAG_C = 0b00000100, ++/// const FLAG_ABC = FLAG_A.bits ++/// | FLAG_B.bits ++/// | FLAG_C.bits, ++/// } ++/// } ++/// ++/// fn main() { ++/// let e1 = FLAG_A | FLAG_C; ++/// let e2 = FLAG_B | FLAG_C; ++/// assert_eq!((e1 | e2), FLAG_ABC); // union ++/// assert_eq!((e1 & e2), FLAG_C); // intersection ++/// assert_eq!((e1 - e2), FLAG_A); // set difference ++/// assert_eq!(!e2, FLAG_A); // set complement ++/// } ++/// ``` ++/// ++/// The generated `struct`s can also be extended with type and trait ++/// implementations: ++/// ++/// ```{.rust} ++/// #[macro_use] ++/// extern crate bitflags; ++/// ++/// use std::fmt; ++/// ++/// bitflags! { ++/// flags Flags: u32 { ++/// const FLAG_A = 0b00000001, ++/// const FLAG_B = 0b00000010, ++/// } ++/// } ++/// ++/// impl Flags { ++/// pub fn clear(&mut self) { ++/// self.bits = 0; // The `bits` field can be accessed from within the ++/// // same module where the `bitflags!` macro was invoked. ++/// } ++/// } ++/// ++/// impl fmt::Display for Flags { ++/// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++/// write!(f, "hi!") ++/// } ++/// } ++/// ++/// fn main() { ++/// let mut flags = FLAG_A | FLAG_B; ++/// flags.clear(); ++/// assert!(flags.is_empty()); ++/// assert_eq!(format!("{}", flags), "hi!"); ++/// assert_eq!(format!("{:?}", FLAG_A | FLAG_B), "FLAG_A | FLAG_B"); ++/// assert_eq!(format!("{:?}", FLAG_B), "FLAG_B"); ++/// } ++/// ``` ++/// ++/// # Visibility ++/// ++/// The generated struct and its associated flag constants are not exported ++/// out of the current module by default. A definition can be exported out of ++/// the current module by adding `pub` before `flags`: ++/// ++/// ```{.rust},ignore ++/// #[macro_use] ++/// extern crate bitflags; ++/// ++/// mod example { ++/// bitflags! { ++/// pub flags Flags1: u32 { ++/// const FLAG_A = 0b00000001, ++/// } ++/// } ++/// bitflags! 
{ ++/// flags Flags2: u32 { ++/// const FLAG_B = 0b00000010, ++/// } ++/// } ++/// } ++/// ++/// fn main() { ++/// let flag1 = example::FLAG_A; ++/// let flag2 = example::FLAG_B; // error: const `FLAG_B` is private ++/// } ++/// ``` ++/// ++/// # Attributes ++/// ++/// Attributes can be attached to the generated `struct` by placing them ++/// before the `flags` keyword. ++/// ++/// # Trait implementations ++/// ++/// The `Copy`, `Clone`, `PartialEq`, `Eq`, `PartialOrd`, `Ord` and `Hash` ++/// traits automatically derived for the `struct` using the `derive` attribute. ++/// Additional traits can be derived by providing an explicit `derive` ++/// attribute on `flags`. ++/// ++/// The `Extend` and `FromIterator` traits are implemented for the `struct`, ++/// too: `Extend` adds the union of the instances of the `struct` iterated over, ++/// while `FromIterator` calculates the union. ++/// ++/// The `Debug` trait is also implemented by displaying the bits value of the ++/// internal struct. ++/// ++/// ## Operators ++/// ++/// The following operator traits are implemented for the generated `struct`: ++/// ++/// - `BitOr` and `BitOrAssign`: union ++/// - `BitAnd` and `BitAndAssign`: intersection ++/// - `BitXor` and `BitXorAssign`: toggle ++/// - `Sub` and `SubAssign`: set difference ++/// - `Not`: set complement ++/// ++/// As long as the assignment operators are unstable rust feature they are only ++/// available with the crate feature `assignment_ops` enabled. 
++/// ++/// # Methods ++/// ++/// The following methods are defined for the generated `struct`: ++/// ++/// - `empty`: an empty set of flags ++/// - `all`: the set of all flags ++/// - `bits`: the raw value of the flags currently stored ++/// - `from_bits`: convert from underlying bit representation, unless that ++/// representation contains bits that do not correspond to a flag ++/// - `from_bits_truncate`: convert from underlying bit representation, dropping ++/// any bits that do not correspond to flags ++/// - `is_empty`: `true` if no flags are currently stored ++/// - `is_all`: `true` if all flags are currently set ++/// - `intersects`: `true` if there are flags common to both `self` and `other` ++/// - `contains`: `true` all of the flags in `other` are contained within `self` ++/// - `insert`: inserts the specified flags in-place ++/// - `remove`: removes the specified flags in-place ++/// - `toggle`: the specified flags will be inserted if not present, and removed ++/// if they are. ++#[macro_export] ++macro_rules! bitflags { ++ ($(#[$attr:meta])* pub flags $BitFlags:ident: $T:ty { ++ $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+ ++ }) => { ++ #[derive(Copy, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)] ++ $(#[$attr])* ++ pub struct $BitFlags { ++ bits: $T, ++ } ++ ++ $($(#[$Flag_attr])* pub const $Flag: $BitFlags = $BitFlags { bits: $value };)+ ++ ++ bitflags! { ++ @_impl flags $BitFlags: $T { ++ $($(#[$Flag_attr])* const $Flag = $value),+ ++ } ++ } ++ }; ++ ($(#[$attr:meta])* flags $BitFlags:ident: $T:ty { ++ $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+ ++ }) => { ++ #[derive(Copy, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)] ++ $(#[$attr])* ++ struct $BitFlags { ++ bits: $T, ++ } ++ ++ $($(#[$Flag_attr])* const $Flag: $BitFlags = $BitFlags { bits: $value };)+ ++ ++ bitflags! 
{ ++ @_impl flags $BitFlags: $T { ++ $($(#[$Flag_attr])* const $Flag = $value),+ ++ } ++ } ++ }; ++ (@_impl flags $BitFlags:ident: $T:ty { ++ $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+ ++ }) => { ++ impl $crate::__core::fmt::Debug for $BitFlags { ++ fn fmt(&self, f: &mut $crate::__core::fmt::Formatter) -> $crate::__core::fmt::Result { ++ // This convoluted approach is to handle #[cfg]-based flag ++ // omission correctly. Some of the $Flag variants may not be ++ // defined in this module so we create an inner module which ++ // defines *all* flags to the value of 0. We then create a ++ // second inner module that defines all of the flags with #[cfg] ++ // to their real values. Afterwards the glob will import ++ // variants from the second inner module, shadowing all ++ // defined variants, leaving only the undefined ones with the ++ // bit value of 0. ++ #[allow(dead_code)] ++ #[allow(unused_assignments)] ++ mod dummy { ++ // We can't use the real $BitFlags struct because it may be ++ // private, which prevents us from using it to define ++ // public constants. ++ pub struct $BitFlags { ++ bits: u64, ++ } ++ mod real_flags { ++ use super::$BitFlags; ++ $($(#[$Flag_attr])* pub const $Flag: $BitFlags = $BitFlags { ++ bits: super::super::$Flag.bits as u64 ++ };)+ ++ } ++ // Now we define the "undefined" versions of the flags. ++ // This way, all the names exist, even if some are #[cfg]ed ++ // out. ++ $(const $Flag: $BitFlags = $BitFlags { bits: 0 };)+ ++ ++ #[inline] ++ pub fn fmt(self_: u64, ++ f: &mut $crate::__core::fmt::Formatter) ++ -> $crate::__core::fmt::Result { ++ // Now we import the real values for the flags. ++ // Only ones that are #[cfg]ed out will be 0. 
++ use self::real_flags::*; ++ ++ let mut first = true; ++ $( ++ // $Flag.bits == 0 means that $Flag doesn't exist ++ if $Flag.bits != 0 && self_ & $Flag.bits as u64 == $Flag.bits as u64 { ++ if !first { ++ try!(f.write_str(" | ")); ++ } ++ first = false; ++ try!(f.write_str(stringify!($Flag))); ++ } ++ )+ ++ Ok(()) ++ } ++ } ++ dummy::fmt(self.bits as u64, f) ++ } ++ } ++ ++ #[allow(dead_code)] ++ impl $BitFlags { ++ /// Returns an empty set of flags. ++ #[inline] ++ pub fn empty() -> $BitFlags { ++ $BitFlags { bits: 0 } ++ } ++ ++ /// Returns the set containing all flags. ++ #[inline] ++ pub fn all() -> $BitFlags { ++ // See above `dummy` module for why this approach is taken. ++ #[allow(dead_code)] ++ mod dummy { ++ pub struct $BitFlags { ++ bits: u64, ++ } ++ mod real_flags { ++ use super::$BitFlags; ++ $($(#[$Flag_attr])* pub const $Flag: $BitFlags = $BitFlags { ++ bits: super::super::$Flag.bits as u64 ++ };)+ ++ } ++ $(const $Flag: $BitFlags = $BitFlags { bits: 0 };)+ ++ ++ #[inline] ++ pub fn all() -> u64 { ++ use self::real_flags::*; ++ $($Flag.bits)|+ ++ } ++ } ++ $BitFlags { bits: dummy::all() as $T } ++ } ++ ++ /// Returns the raw value of the flags currently stored. ++ #[inline] ++ pub fn bits(&self) -> $T { ++ self.bits ++ } ++ ++ /// Convert from underlying bit representation, unless that ++ /// representation contains bits that do not correspond to a flag. ++ #[inline] ++ pub fn from_bits(bits: $T) -> $crate::__core::option::Option<$BitFlags> { ++ if (bits & !$BitFlags::all().bits()) == 0 { ++ $crate::__core::option::Option::Some($BitFlags { bits: bits }) ++ } else { ++ $crate::__core::option::Option::None ++ } ++ } ++ ++ /// Convert from underlying bit representation, dropping any bits ++ /// that do not correspond to flags. ++ #[inline] ++ pub fn from_bits_truncate(bits: $T) -> $BitFlags { ++ $BitFlags { bits: bits } & $BitFlags::all() ++ } ++ ++ /// Returns `true` if no flags are currently stored. 
++ #[inline] ++ pub fn is_empty(&self) -> bool { ++ *self == $BitFlags::empty() ++ } ++ ++ /// Returns `true` if all flags are currently set. ++ #[inline] ++ pub fn is_all(&self) -> bool { ++ *self == $BitFlags::all() ++ } ++ ++ /// Returns `true` if there are flags common to both `self` and `other`. ++ #[inline] ++ pub fn intersects(&self, other: $BitFlags) -> bool { ++ !(*self & other).is_empty() ++ } ++ ++ /// Returns `true` all of the flags in `other` are contained within `self`. ++ #[inline] ++ pub fn contains(&self, other: $BitFlags) -> bool { ++ (*self & other) == other ++ } ++ ++ /// Inserts the specified flags in-place. ++ #[inline] ++ pub fn insert(&mut self, other: $BitFlags) { ++ self.bits |= other.bits; ++ } ++ ++ /// Removes the specified flags in-place. ++ #[inline] ++ pub fn remove(&mut self, other: $BitFlags) { ++ self.bits &= !other.bits; ++ } ++ ++ /// Toggles the specified flags in-place. ++ #[inline] ++ pub fn toggle(&mut self, other: $BitFlags) { ++ self.bits ^= other.bits; ++ } ++ } ++ ++ impl $crate::__core::ops::BitOr for $BitFlags { ++ type Output = $BitFlags; ++ ++ /// Returns the union of the two sets of flags. ++ #[inline] ++ fn bitor(self, other: $BitFlags) -> $BitFlags { ++ $BitFlags { bits: self.bits | other.bits } ++ } ++ } ++ ++ impl $crate::__core::ops::BitOrAssign for $BitFlags { ++ ++ /// Adds the set of flags. ++ #[inline] ++ fn bitor_assign(&mut self, other: $BitFlags) { ++ self.bits |= other.bits; ++ } ++ } ++ ++ impl $crate::__core::ops::BitXor for $BitFlags { ++ type Output = $BitFlags; ++ ++ /// Returns the left flags, but with all the right flags toggled. ++ #[inline] ++ fn bitxor(self, other: $BitFlags) -> $BitFlags { ++ $BitFlags { bits: self.bits ^ other.bits } ++ } ++ } ++ ++ impl $crate::__core::ops::BitXorAssign for $BitFlags { ++ ++ /// Toggles the set of flags. 
++ #[inline] ++ fn bitxor_assign(&mut self, other: $BitFlags) { ++ self.bits ^= other.bits; ++ } ++ } ++ ++ impl $crate::__core::ops::BitAnd for $BitFlags { ++ type Output = $BitFlags; ++ ++ /// Returns the intersection between the two sets of flags. ++ #[inline] ++ fn bitand(self, other: $BitFlags) -> $BitFlags { ++ $BitFlags { bits: self.bits & other.bits } ++ } ++ } ++ ++ impl $crate::__core::ops::BitAndAssign for $BitFlags { ++ ++ /// Disables all flags disabled in the set. ++ #[inline] ++ fn bitand_assign(&mut self, other: $BitFlags) { ++ self.bits &= other.bits; ++ } ++ } ++ ++ impl $crate::__core::ops::Sub for $BitFlags { ++ type Output = $BitFlags; ++ ++ /// Returns the set difference of the two sets of flags. ++ #[inline] ++ fn sub(self, other: $BitFlags) -> $BitFlags { ++ $BitFlags { bits: self.bits & !other.bits } ++ } ++ } ++ ++ impl $crate::__core::ops::SubAssign for $BitFlags { ++ ++ /// Disables all flags enabled in the set. ++ #[inline] ++ fn sub_assign(&mut self, other: $BitFlags) { ++ self.bits &= !other.bits; ++ } ++ } ++ ++ impl $crate::__core::ops::Not for $BitFlags { ++ type Output = $BitFlags; ++ ++ /// Returns the complement of this set of flags. ++ #[inline] ++ fn not(self) -> $BitFlags { ++ $BitFlags { bits: !self.bits } & $BitFlags::all() ++ } ++ } ++ ++ impl $crate::__core::iter::Extend<$BitFlags> for $BitFlags { ++ fn extend>(&mut self, iterator: T) { ++ for item in iterator { ++ self.insert(item) ++ } ++ } ++ } ++ ++ impl $crate::__core::iter::FromIterator<$BitFlags> for $BitFlags { ++ fn from_iter>(iterator: T) -> $BitFlags { ++ let mut result = Self::empty(); ++ result.extend(iterator); ++ result ++ } ++ } ++ }; ++ ($(#[$attr:meta])* pub flags $BitFlags:ident: $T:ty { ++ $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+, ++ }) => { ++ bitflags! 
{ ++ $(#[$attr])* ++ pub flags $BitFlags: $T { ++ $($(#[$Flag_attr])* const $Flag = $value),+ ++ } ++ } ++ }; ++ ($(#[$attr:meta])* flags $BitFlags:ident: $T:ty { ++ $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+, ++ }) => { ++ bitflags! { ++ $(#[$attr])* ++ flags $BitFlags: $T { ++ $($(#[$Flag_attr])* const $Flag = $value),+ ++ } ++ } ++ }; ++} ++ ++#[cfg(test)] ++#[allow(non_upper_case_globals, dead_code)] ++mod tests { ++ use std::hash::{SipHasher, Hash, Hasher}; ++ ++ bitflags! { ++ #[doc = "> The first principle is that you must not fool yourself — and"] ++ #[doc = "> you are the easiest person to fool."] ++ #[doc = "> "] ++ #[doc = "> - Richard Feynman"] ++ flags Flags: u32 { ++ const FlagA = 0b00000001, ++ #[doc = " macros are way better at generating code than trans is"] ++ const FlagB = 0b00000010, ++ const FlagC = 0b00000100, ++ #[doc = "* cmr bed"] ++ #[doc = "* strcat table"] ++ #[doc = " wait what?"] ++ const FlagABC = FlagA.bits ++ | FlagB.bits ++ | FlagC.bits, ++ } ++ } ++ ++ bitflags! { ++ flags _CfgFlags: u32 { ++ #[cfg(windows)] ++ const _CfgA = 0b01, ++ #[cfg(unix)] ++ const _CfgB = 0b01, ++ #[cfg(windows)] ++ const _CfgC = _CfgA.bits | 0b10, ++ } ++ } ++ ++ bitflags! 
{ ++ flags AnotherSetOfFlags: i8 { ++ const AnotherFlag = -1_i8, ++ } ++ } ++ ++ #[test] ++ fn test_bits(){ ++ assert_eq!(Flags::empty().bits(), 0b00000000); ++ assert_eq!(FlagA.bits(), 0b00000001); ++ assert_eq!(FlagABC.bits(), 0b00000111); ++ ++ assert_eq!(AnotherSetOfFlags::empty().bits(), 0b00); ++ assert_eq!(AnotherFlag.bits(), !0_i8); ++ } ++ ++ #[test] ++ fn test_from_bits() { ++ assert_eq!(Flags::from_bits(0), Some(Flags::empty())); ++ assert_eq!(Flags::from_bits(0b1), Some(FlagA)); ++ assert_eq!(Flags::from_bits(0b10), Some(FlagB)); ++ assert_eq!(Flags::from_bits(0b11), Some(FlagA | FlagB)); ++ assert_eq!(Flags::from_bits(0b1000), None); ++ ++ assert_eq!(AnotherSetOfFlags::from_bits(!0_i8), Some(AnotherFlag)); ++ } ++ ++ #[test] ++ fn test_from_bits_truncate() { ++ assert_eq!(Flags::from_bits_truncate(0), Flags::empty()); ++ assert_eq!(Flags::from_bits_truncate(0b1), FlagA); ++ assert_eq!(Flags::from_bits_truncate(0b10), FlagB); ++ assert_eq!(Flags::from_bits_truncate(0b11), (FlagA | FlagB)); ++ assert_eq!(Flags::from_bits_truncate(0b1000), Flags::empty()); ++ assert_eq!(Flags::from_bits_truncate(0b1001), FlagA); ++ ++ assert_eq!(AnotherSetOfFlags::from_bits_truncate(0_i8), AnotherSetOfFlags::empty()); ++ } ++ ++ #[test] ++ fn test_is_empty(){ ++ assert!(Flags::empty().is_empty()); ++ assert!(!FlagA.is_empty()); ++ assert!(!FlagABC.is_empty()); ++ ++ assert!(!AnotherFlag.is_empty()); ++ } ++ ++ #[test] ++ fn test_is_all() { ++ assert!(Flags::all().is_all()); ++ assert!(!FlagA.is_all()); ++ assert!(FlagABC.is_all()); ++ ++ assert!(AnotherFlag.is_all()); ++ } ++ ++ #[test] ++ fn test_two_empties_do_not_intersect() { ++ let e1 = Flags::empty(); ++ let e2 = Flags::empty(); ++ assert!(!e1.intersects(e2)); ++ ++ assert!(AnotherFlag.intersects(AnotherFlag)); ++ } ++ ++ #[test] ++ fn test_empty_does_not_intersect_with_full() { ++ let e1 = Flags::empty(); ++ let e2 = FlagABC; ++ assert!(!e1.intersects(e2)); ++ } ++ ++ #[test] ++ fn test_disjoint_intersects() { ++ 
let e1 = FlagA; ++ let e2 = FlagB; ++ assert!(!e1.intersects(e2)); ++ } ++ ++ #[test] ++ fn test_overlapping_intersects() { ++ let e1 = FlagA; ++ let e2 = FlagA | FlagB; ++ assert!(e1.intersects(e2)); ++ } ++ ++ #[test] ++ fn test_contains() { ++ let e1 = FlagA; ++ let e2 = FlagA | FlagB; ++ assert!(!e1.contains(e2)); ++ assert!(e2.contains(e1)); ++ assert!(FlagABC.contains(e2)); ++ ++ assert!(AnotherFlag.contains(AnotherFlag)); ++ } ++ ++ #[test] ++ fn test_insert(){ ++ let mut e1 = FlagA; ++ let e2 = FlagA | FlagB; ++ e1.insert(e2); ++ assert_eq!(e1, e2); ++ ++ let mut e3 = AnotherSetOfFlags::empty(); ++ e3.insert(AnotherFlag); ++ assert_eq!(e3, AnotherFlag); ++ } ++ ++ #[test] ++ fn test_remove(){ ++ let mut e1 = FlagA | FlagB; ++ let e2 = FlagA | FlagC; ++ e1.remove(e2); ++ assert_eq!(e1, FlagB); ++ ++ let mut e3 = AnotherFlag; ++ e3.remove(AnotherFlag); ++ assert_eq!(e3, AnotherSetOfFlags::empty()); ++ } ++ ++ #[test] ++ fn test_operators() { ++ let e1 = FlagA | FlagC; ++ let e2 = FlagB | FlagC; ++ assert_eq!((e1 | e2), FlagABC); // union ++ assert_eq!((e1 & e2), FlagC); // intersection ++ assert_eq!((e1 - e2), FlagA); // set difference ++ assert_eq!(!e2, FlagA); // set complement ++ assert_eq!(e1 ^ e2, FlagA | FlagB); // toggle ++ let mut e3 = e1; ++ e3.toggle(e2); ++ assert_eq!(e3, FlagA | FlagB); ++ ++ let mut m4 = AnotherSetOfFlags::empty(); ++ m4.toggle(AnotherSetOfFlags::empty()); ++ assert_eq!(m4, AnotherSetOfFlags::empty()); ++ } ++ ++ #[test] ++ fn test_assignment_operators() { ++ let mut m1 = Flags::empty(); ++ let e1 = FlagA | FlagC; ++ // union ++ m1 |= FlagA; ++ assert_eq!(m1, FlagA); ++ // intersection ++ m1 &= e1; ++ assert_eq!(m1, FlagA); ++ // set difference ++ m1 -= m1; ++ assert_eq!(m1, Flags::empty()); ++ // toggle ++ m1 ^= e1; ++ assert_eq!(m1, e1); ++ } ++ ++ #[test] ++ fn test_extend() { ++ let mut flags; ++ ++ flags = Flags::empty(); ++ flags.extend([].iter().cloned()); ++ assert_eq!(flags, Flags::empty()); ++ ++ flags = Flags::empty(); 
++ flags.extend([FlagA, FlagB].iter().cloned()); ++ assert_eq!(flags, FlagA | FlagB); ++ ++ flags = FlagA; ++ flags.extend([FlagA, FlagB].iter().cloned()); ++ assert_eq!(flags, FlagA | FlagB); ++ ++ flags = FlagB; ++ flags.extend([FlagA, FlagABC].iter().cloned()); ++ assert_eq!(flags, FlagABC); ++ } ++ ++ #[test] ++ fn test_from_iterator() { ++ assert_eq!([].iter().cloned().collect::(), Flags::empty()); ++ assert_eq!([FlagA, FlagB].iter().cloned().collect::(), FlagA | FlagB); ++ assert_eq!([FlagA, FlagABC].iter().cloned().collect::(), FlagABC); ++ } ++ ++ #[test] ++ fn test_lt() { ++ let mut a = Flags::empty(); ++ let mut b = Flags::empty(); ++ ++ assert!(!(a < b) && !(b < a)); ++ b = FlagB; ++ assert!(a < b); ++ a = FlagC; ++ assert!(!(a < b) && b < a); ++ b = FlagC | FlagB; ++ assert!(a < b); ++ } ++ ++ #[test] ++ fn test_ord() { ++ let mut a = Flags::empty(); ++ let mut b = Flags::empty(); ++ ++ assert!(a <= b && a >= b); ++ a = FlagA; ++ assert!(a > b && a >= b); ++ assert!(b < a && b <= a); ++ b = FlagB; ++ assert!(b > a && b >= a); ++ assert!(a < b && a <= b); ++ } ++ ++ fn hash(t: &T) -> u64 { ++ let mut s = SipHasher::new_with_keys(0, 0); ++ t.hash(&mut s); ++ s.finish() ++ } ++ ++ #[test] ++ fn test_hash() { ++ let mut x = Flags::empty(); ++ let mut y = Flags::empty(); ++ assert_eq!(hash(&x), hash(&y)); ++ x = Flags::all(); ++ y = FlagABC; ++ assert_eq!(hash(&x), hash(&y)); ++ } ++ ++ #[test] ++ fn test_debug() { ++ assert_eq!(format!("{:?}", FlagA | FlagB), "FlagA | FlagB"); ++ assert_eq!(format!("{:?}", FlagABC), "FlagA | FlagB | FlagC | FlagABC"); ++ } ++ ++ mod submodule { ++ bitflags! { ++ pub flags PublicFlags: i8 { ++ const FlagX = 0, ++ } ++ } ++ bitflags! { ++ flags PrivateFlags: i8 { ++ const FlagY = 0, ++ } ++ } ++ ++ #[test] ++ fn test_private() { ++ let _ = FlagY; ++ } ++ } ++ ++ #[test] ++ fn test_public() { ++ let _ = submodule::FlagX; ++ } ++ ++ mod t1 { ++ mod foo { ++ pub type Bar = i32; ++ } ++ ++ bitflags! 
{ ++ /// baz ++ flags Flags: foo::Bar { ++ const A = 0b00000001, ++ #[cfg(foo)] ++ const B = 0b00000010, ++ #[cfg(foo)] ++ const C = 0b00000010, ++ } ++ } ++ } ++} diff --cc vendor/bitflags-0.7.0/tests/external.rs index 000000000,000000000..0f0c7f665 new file mode 100644 --- /dev/null +++ b/vendor/bitflags-0.7.0/tests/external.rs @@@ -1,0 -1,0 +1,21 @@@ ++#![allow(dead_code)] ++ ++#[macro_use] ++extern crate bitflags; ++ ++bitflags! { ++ /// baz ++ flags Flags: u32 { ++ const A = 0b00000001, ++ #[doc = "bar"] ++ const B = 0b00000010, ++ const C = 0b00000100, ++ #[doc = "foo"] ++ const ABC = A.bits | B.bits | C.bits, ++ } ++} ++ ++#[test] ++fn smoke() { ++ assert_eq!(ABC, A | B | C); ++} diff --cc vendor/bitflags-0.7.0/tests/external_no_std.rs index 000000000,000000000..46526fd71 new file mode 100644 --- /dev/null +++ b/vendor/bitflags-0.7.0/tests/external_no_std.rs @@@ -1,0 -1,0 +1,22 @@@ ++#![allow(dead_code)] ++#![no_std] ++ ++#[macro_use] ++extern crate bitflags; ++ ++bitflags! { ++ /// baz ++ flags Flags: u32 { ++ const A = 0b00000001, ++ #[doc = "bar"] ++ const B = 0b00000010, ++ const C = 0b00000100, ++ #[doc = "foo"] ++ const ABC = A.bits | B.bits | C.bits, ++ } ++} ++ ++#[test] ++fn smoke() { ++ assert_eq!(ABC, A | B | C); ++} diff --cc vendor/conv-0.3.3/.cargo-checksum.json index 000000000,000000000..d87627ea8 new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"78ff10625fd0ac447827aa30ea8b861fead473bb60aeb73af6c1c58caf0d1299"} diff --cc vendor/conv-0.3.3/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/.cargo-ok diff --cc vendor/conv-0.3.3/.travis.yml index 000000000,000000000..067ec09c7 new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/.travis.yml @@@ -1,0 -1,0 +1,17 @@@ ++language: rust ++script: cargo build --verbose && cargo test --verbose ++rust: ++ - 1.2.0 ++ - 1.3.0 ++ - 1.4.0 ++ - 1.5.0 ++ - 1.6.0 ++ - stable 
++ - beta ++ - nightly ++matrix: ++ allow_failures: ++ - rust: nightly ++branches: ++ except: ++ - /^issue-.*$/ diff --cc vendor/conv-0.3.3/Cargo.toml index 000000000,000000000..42f42aa0d new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/Cargo.toml @@@ -1,0 -1,0 +1,22 @@@ ++[package] ++name = "conv" ++version = "0.3.3" ++authors = ["Daniel Keep "] ++ ++description = "This crate provides a number of conversion traits with more specific semantics than those provided by 'as' or 'From'/'Into'." ++repository = "https://github.com/DanielKeep/rust-conv" ++documentation = "https://danielkeep.github.io/rust-conv/doc/conv/index.html" ++readme = "README.md" ++license = "MIT" ++keywords = ["from", "into", "conversion", "approximation"] ++ ++exclude = [ ++ "scripts/*", ++ "update-docs.py", ++] ++ ++[dependencies] ++custom_derive = "0.1.2" ++ ++[dev-dependencies] ++quickcheck = "0.2.21, < 0.2.25" diff --cc vendor/conv-0.3.3/LICENSE index 000000000,000000000..a2af29bc7 new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/LICENSE @@@ -1,0 -1,0 +1,25 @@@ ++Copyright (c) 2015 Daniel Keep ++ ++Permission is hereby granted, free of charge, to any ++person obtaining a copy of this software and associated ++documentation files (the "Software"), to deal in the ++Software without restriction, including without ++limitation the rights to use, copy, modify, merge, ++publish, distribute, sublicense, and/or sell copies of ++the Software, and to permit persons to whom the Software ++is furnished to do so, subject to the following ++conditions: ++ ++The above copyright notice and this permission notice ++shall be included in all copies or substantial portions ++of the Software. ++ ++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ++ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED ++TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A ++PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT ++SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY ++CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION ++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR ++IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER ++DEALINGS IN THE SOFTWARE. diff --cc vendor/conv-0.3.3/README.md index 000000000,000000000..85a1a287c new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/README.md @@@ -1,0 -1,0 +1,129 @@@ ++ ++# `conv` ++ ++This crate provides a number of conversion traits with more specific semantics than those provided by `as` or `From`/`Into`. ++ ++The goal with the traits provided here is to be more specific about what generic code can rely on, as well as provide reasonably self-describing alternatives to the standard `From`/`Into` traits. For example, the although `T: From` might be satisfied in generic code, this says nothing about what *kind* of conversion that represents. ++ ++In addition, `From`/`Into` provide no facility for a conversion failing, meaning that implementations may need to choose between conversions that may not be valid, or panicking; neither option is appealing in general. ++ ++**Links** ++ ++* [Latest Release](https://crates.io/crates/scan-rules/) ++* [Latest Docs](https://danielkeep.github.io/rust-scan-rules/doc/scan_rules/index.html) ++* [Repository](https://github.com/DanielKeep/rust-scan-rules) ++ ++## Compatibility ++ ++`conv` is compatible with Rust 1.2 and higher. ++ ++## Examples ++ ++```rust ++# extern crate conv; ++# use conv::*; ++# fn main() { ++// This *cannot* fail, so we can use `unwrap_ok` to discard the `Result`. ++assert_eq!(u8::value_from(0u8).unwrap_ok(), 0u8); ++ ++// This *can* fail. Specifically, it can overflow toward negative infinity. ++assert_eq!(u8::value_from(0i8), Ok(0u8)); ++assert_eq!(u8::value_from(-1i8), Err(NegOverflow(-1))); ++ ++// This can overflow in *either* direction; hence the change to `RangeError`. 
++assert_eq!(u8::value_from(-1i16), Err(RangeError::NegOverflow(-1))); ++assert_eq!(u8::value_from(0i16), Ok(0u8)); ++assert_eq!(u8::value_from(256i16), Err(RangeError::PosOverflow(256))); ++ ++// We can use the extension traits to simplify this a little. ++assert_eq!(u8::value_from(-1i16).unwrap_or_saturate(), 0u8); ++assert_eq!(u8::value_from(0i16).unwrap_or_saturate(), 0u8); ++assert_eq!(u8::value_from(256i16).unwrap_or_saturate(), 255u8); ++ ++// Obviously, all integers can be "approximated" using the default scheme (it ++// doesn't *do* anything), but they can *also* be approximated with the ++// `Wrapping` scheme. ++assert_eq!( ++ >::approx_from(400u16), ++ Err(PosOverflow(400))); ++assert_eq!( ++ >::approx_from(400u16), ++ Ok(144u8)); ++ ++// This is rather inconvenient; as such, there are a number of convenience ++// extension methods available via `ConvUtil` and `ConvAsUtil`. ++assert_eq!(400u16.approx(), Err::(PosOverflow(400))); ++assert_eq!(400u16.approx_by::(), Ok::(144u8)); ++assert_eq!(400u16.approx_as::(), Err(PosOverflow(400))); ++assert_eq!(400u16.approx_as_by::(), Ok(144)); ++ ++// Integer -> float conversions *can* fail due to limited precision. ++// Once the continuous range of exactly representable integers is exceeded, the ++// provided implementations fail with overflow errors. ++assert_eq!(f32::value_from(16_777_216i32), Ok(16_777_216.0f32)); ++assert_eq!(f32::value_from(16_777_217i32), Err(RangeError::PosOverflow(16_777_217))); ++ ++// Float -> integer conversions have to be done using approximations. Although ++// exact conversions are *possible*, "advertising" this with an implementation ++// is misleading. ++// ++// Note that `DefaultApprox` for float -> integer uses whatever rounding ++// mode is currently active (*i.e.* whatever `as` would do). 
++assert_eq!(41.0f32.approx(), Ok(41u8)); ++assert_eq!(41.3f32.approx(), Ok(41u8)); ++assert_eq!(41.5f32.approx(), Ok(41u8)); ++assert_eq!(41.8f32.approx(), Ok(41u8)); ++assert_eq!(42.0f32.approx(), Ok(42u8)); ++ ++assert_eq!(255.0f32.approx(), Ok(255u8)); ++assert_eq!(256.0f32.approx(), Err::(FloatError::PosOverflow(256.0))); ++ ++// Sometimes, it can be useful to saturate the conversion from float to ++// integer directly, then account for NaN as input separately. The `Saturate` ++// extension trait exists for this reason. ++assert_eq!((-23.0f32).approx_as::().saturate(), Ok(0)); ++assert_eq!(302.0f32.approx_as::().saturate(), Ok(255u8)); ++assert!(std::f32::NAN.approx_as::().saturate().is_err()); ++ ++// If you really don't care about the specific kind of error, you can just rely ++// on automatic conversion to `GeneralErrorKind`. ++fn too_many_errors() -> Result<(), GeneralErrorKind> { ++ assert_eq!({let r: u8 = try!(0u8.value_into()); r}, 0u8); ++ assert_eq!({let r: u8 = try!(0i8.value_into()); r}, 0u8); ++ assert_eq!({let r: u8 = try!(0i16.value_into()); r}, 0u8); ++ assert_eq!({let r: u8 = try!(0.0f32.approx()); r}, 0u8); ++ Ok(()) ++} ++# let _ = too_many_errors(); ++# } ++``` ++ ++## Change Log ++ ++### v0.3.2 ++ ++- Added integer ↔ `char` conversions. ++- Added missing `isize`/`usize` → `f32`/`f64` conversions. ++- Fixed the error type of `i64` → `usize` for 64-bit targets. ++ ++### v0.3.1 ++ ++- Change to `unwrap_ok` for better codegen (thanks bluss). ++- Fix for Rust breaking change (code in question was dodgy anyway; thanks m4rw3r). ++ ++### v0.3.0 ++ ++- Added an `Error` constraint to all `Err` associated types. This will break any user-defined conversions where the `Err` type does not implement `Error`. ++- Renamed the `Overflow` and `Underflow` errors to `PosOverflow` and `NegOverflow` respectively. In the context of floating point conversions, "underflow" usually means the value was too close to zero to correctly represent. 
++ ++### v0.2.1 ++ ++- Added `ConvUtil::into_as` as a shortcut for `Into::::into`. ++- Added `#[inline]` attributes. ++- Added `Saturate::saturate`, which can saturate `Result`s arising from over/underflow. ++ ++### v0.2.0 ++ ++- Changed all error types to include the original input as payload. This breaks pretty much *everything*. Sorry about that. On the bright side, there's now no downside to using the conversion traits for non-`Copy` types. ++- Added the normal rounding modes for float → int approximations: `RoundToNearest`, `RoundToNegInf`, `RoundToPosInf`, and `RoundToZero`. ++- `ApproxWith` is now subsumed by a pair of extension traits (`ConvUtil` and `ConvAsUtil`), that also have shortcuts for `TryInto` and `ValueInto` so that you can specify the destination type on the method. diff --cc vendor/conv-0.3.3/src/errors.rs index 000000000,000000000..9e005b044 new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/src/errors.rs @@@ -1,0 -1,0 +1,606 @@@ ++/*! ++This module defines the various error types that can be produced by a failed conversion. ++ ++In addition, it also defines some extension traits to make working with failable conversions more ergonomic (see the `Unwrap*` traits). ++*/ ++ ++use std::any::Any; ++use std::error::Error; ++use std::fmt::{self, Debug, Display}; ++use misc::{Saturated, InvalidSentinel, SignedInfinity}; ++ ++macro_rules! Desc { ++ ( ++ ($desc:expr) ++ pub struct $name:ident<$t:ident> $_body:tt; ++ ) => { ++ impl<$t> Display for $name<$t> { ++ fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { ++ write!(fmt, $desc) ++ } ++ } ++ ++ impl<$t> Error for $name<$t> where $t: Any { ++ fn description(&self) -> &str { ++ $desc ++ } ++ } ++ }; ++} ++ ++macro_rules! 
DummyDebug { ++ ( ++ () pub enum $name:ident<$t:ident> { ++ $(#[doc=$_doc:tt] $vname:ident($_vpay:ident),)+ ++ } ++ ) => { ++ impl<$t> Debug for $name<$t> { ++ fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { ++ let msg = match *self { ++ $($name::$vname(_) => stringify!($vname),)+ ++ }; ++ write!(fmt, concat!(stringify!($name), "::{}(..)"), msg) ++ } ++ } ++ }; ++ ++ ( ++ () pub struct $name:ident<$t:ident>(pub $_pay:ident); ++ ) => { ++ impl<$t> Debug for $name<$t> { ++ fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { ++ write!(fmt, concat!(stringify!($name), "(..)")) ++ } ++ } ++ }; ++} ++ ++macro_rules! EnumDesc { ++ ( ++ ($($vname:ident => $vdesc:expr,)+) ++ pub enum $name:ident $_body:tt ++ ) => { ++ impl Display for $name { ++ fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { ++ write!(fmt, "{}", ++ match *self { $($name::$vname => $vdesc,)+ }) ++ } ++ } ++ ++ impl Error for $name { ++ fn description(&self) -> &str { ++ match *self { $($name::$vname => $vdesc,)+ } ++ } ++ } ++ }; ++ ++ ( ++ ($($vname:ident => $vdesc:expr,)+) ++ pub enum $name:ident<$t:ident> $_body:tt ++ ) => { ++ impl<$t> Display for $name<$t> { ++ fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { ++ write!(fmt, "{}", ++ match *self { $($name::$vname(..) => $vdesc,)+ }) ++ } ++ } ++ ++ impl<$t> Error for $name<$t> where $t: Any { ++ fn description(&self) -> &str { ++ match *self { $($name::$vname(..) => $vdesc,)+ } ++ } ++ } ++ }; ++} ++ ++macro_rules! FromName { ++ ( ++ ($fname:ident) ++ pub enum $name:ident<$t:ident> $_body:tt ++ ) => { ++ impl<$t> From<$fname<$t>> for $name<$t> { ++ #[inline] ++ fn from(e: $fname<$t>) -> Self { ++ $name::$fname(e.into_inner()) ++ } ++ } ++ }; ++ ++ ( ++ ($fname:ident<$t:ident>) ++ pub enum $name:ident $_body:tt ++ ) => { ++ impl<$t> From<$fname<$t>> for $name { ++ #[inline] ++ fn from(_: $fname<$t>) -> Self { ++ $name::$fname ++ } ++ } ++ }; ++} ++ ++macro_rules! 
FromNoError { ++ ( ++ () pub enum $name:ident $_body:tt ++ ) => { ++ impl From for $name { ++ #[inline] ++ fn from(_: NoError) -> Self { ++ panic!(concat!("cannot convert NoError into ", stringify!($name))) ++ } ++ } ++ }; ++ ++ ( ++ () pub enum $name:ident<$t:ident> $_body:tt ++ ) => { ++ impl<$t> From for $name<$t> { ++ fn from(_: NoError) -> Self { ++ panic!(concat!("cannot convert NoError into ", stringify!($name))) ++ } ++ } ++ }; ++ ++ ( ++ () pub struct $name:ident<$t:ident> $_body:tt; ++ ) => { ++ impl<$t> From for $name<$t> { ++ fn from(_: NoError) -> Self { ++ panic!(concat!("cannot convert NoError into ", stringify!($name))) ++ } ++ } ++ }; ++} ++ ++macro_rules! FromRemap { ++ ( ++ ($from:ident($($vname:ident),+)) ++ pub enum $name:ident $_body:tt ++ ) => { ++ impl From<$from> for $name { ++ #[inline] ++ fn from(e: $from) -> Self { ++ match e { ++ $($from::$vname => $name::$vname,)+ ++ } ++ } ++ } ++ }; ++ ++ ( ++ ($from:ident<$t:ident>($($vname:ident),+)) ++ pub enum $name:ident $_body:tt ++ ) => { ++ impl<$t> From<$from<$t>> for $name { ++ #[inline] ++ fn from(e: $from<$t>) -> Self { ++ match e { ++ $($from::$vname(..) => $name::$vname,)+ ++ } ++ } ++ } ++ }; ++ ++ ( ++ ($from:ident($($vname:ident),+)) ++ pub enum $name:ident<$t:ident> $_body:tt ++ ) => { ++ impl<$t> From<$from<$t>> for $name<$t> { ++ #[inline] ++ fn from(e: $from<$t>) -> Self { ++ match e { ++ $($from::$vname(v) => $name::$vname(v),)+ ++ } ++ } ++ } ++ }; ++} ++ ++macro_rules! IntoInner { ++ ( ++ () pub enum $name:ident<$t:ident> { ++ $(#[doc=$_doc:tt] $vname:ident($_vpay:ident),)+ ++ } ++ ) => { ++ impl<$t> $name<$t> { ++ /// Returns the value stored in this error. ++ #[inline] ++ pub fn into_inner(self) -> $t { ++ match self { $($name::$vname(v))|+ => v } ++ } ++ } ++ }; ++ ++ ( ++ () pub struct $name:ident<$t:ident>(pub $_pay:ident); ++ ) => { ++ impl<$t> $name<$t> { ++ /// Returns the value stored in this error. 
++ #[inline] ++ pub fn into_inner(self) -> $t { ++ self.0 ++ } ++ } ++ }; ++} ++ ++custom_derive!{ ++ /** ++ A general error enumeration that subsumes all other conversion errors. ++ ++ This exists primarily as a "catch-all" for reliably unifying various different kinds of conversion errors. ++ */ ++ #[derive( ++ Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ++ IntoInner, DummyDebug, FromNoError, ++ EnumDesc( ++ NegOverflow => "conversion resulted in negative overflow", ++ PosOverflow => "conversion resulted in positive overflow", ++ Unrepresentable => "could not convert unrepresentable value", ++ ), ++ FromName(Unrepresentable), ++ FromName(NegOverflow), ++ FromName(PosOverflow), ++ FromRemap(RangeError(NegOverflow, PosOverflow)) ++ )] ++ pub enum GeneralError { ++ /// Input was too negative for the target type. ++ NegOverflow(T), ++ ++ /// Input was too positive for the target type. ++ PosOverflow(T), ++ ++ /// Input was not representable in the target type. ++ Unrepresentable(T), ++ } ++} ++ ++impl From> for GeneralError { ++ #[inline] ++ fn from(e: FloatError) -> GeneralError { ++ use self::FloatError as F; ++ use self::GeneralError as G; ++ match e { ++ F::NegOverflow(v) => G::NegOverflow(v), ++ F::PosOverflow(v) => G::PosOverflow(v), ++ F::NotANumber(v) => G::Unrepresentable(v), ++ } ++ } ++} ++ ++custom_derive! { ++ /** ++ A general error enumeration that subsumes all other conversion errors, but discards all input payloads the errors may be carrying. ++ ++ This exists primarily as a "catch-all" for reliably unifying various different kinds of conversion errors, and between different input types. 
++ */ ++ #[derive( ++ Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug, ++ FromNoError, ++ EnumDesc( ++ NegOverflow => "conversion resulted in negative overflow", ++ PosOverflow => "conversion resulted in positive overflow", ++ Unrepresentable => "could not convert unrepresentable value", ++ ), ++ FromName(Unrepresentable), ++ FromName(NegOverflow), ++ FromName(PosOverflow), ++ FromRemap(RangeErrorKind(NegOverflow, PosOverflow)), ++ FromRemap(RangeError(NegOverflow, PosOverflow)), ++ FromRemap(GeneralError(NegOverflow, PosOverflow, Unrepresentable)) ++ )] ++ pub enum GeneralErrorKind { ++ /// Input was too negative for the target type. ++ NegOverflow, ++ ++ /// Input was too positive for the target type. ++ PosOverflow, ++ ++ /// Input was not representable in the target type. ++ Unrepresentable, ++ } ++} ++ ++impl From> for GeneralErrorKind { ++ #[inline] ++ fn from(e: FloatError) -> GeneralErrorKind { ++ use self::FloatError as F; ++ use self::GeneralErrorKind as G; ++ match e { ++ F::NegOverflow(..) => G::NegOverflow, ++ F::PosOverflow(..) => G::PosOverflow, ++ F::NotANumber(..) => G::Unrepresentable, ++ } ++ } ++} ++ ++/** ++Indicates that it is not possible for the conversion to fail. ++ ++You can use the [`UnwrapOk::unwrap_ok`](./trait.UnwrapOk.html#tymethod.unwrap_ok) method to discard the (statically impossible) `Err` case from a `Result<_, NoError>`, without using `Result::unwrap` (which is typically viewed as a "code smell"). ++*/ ++#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug)] ++pub enum NoError {} ++ ++impl Display for NoError { ++ fn fmt(&self, _: &mut fmt::Formatter) -> Result<(), fmt::Error> { ++ unreachable!() ++ } ++} ++ ++impl Error for NoError { ++ fn description(&self) -> &str { ++ unreachable!() ++ } ++} ++ ++custom_derive! { ++ /// Indicates that the conversion failed because the value was not representable. 
++ #[derive( ++ Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ++ IntoInner, DummyDebug, FromNoError, ++ Desc("could not convert unrepresentable value") ++ )] ++ pub struct Unrepresentable(pub T); ++} ++ ++custom_derive! { ++ /// Indicates that the conversion failed due to a negative overflow. ++ #[derive( ++ Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ++ IntoInner, DummyDebug, FromNoError, ++ Desc("conversion resulted in negative overflow") ++ )] ++ pub struct NegOverflow(pub T); ++} ++ ++custom_derive! { ++ /// Indicates that the conversion failed due to a positive overflow. ++ #[derive( ++ Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ++ IntoInner, DummyDebug, FromNoError, ++ Desc("conversion resulted in positive overflow") ++ )] ++ pub struct PosOverflow(pub T); ++} ++ ++custom_derive! { ++ /** ++ Indicates that a conversion from a floating point type failed. ++ */ ++ #[derive( ++ Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ++ IntoInner, DummyDebug, FromNoError, ++ EnumDesc( ++ NegOverflow => "conversion resulted in negative overflow", ++ PosOverflow => "conversion resulted in positive overflow", ++ NotANumber => "conversion target does not support not-a-number", ++ ), ++ FromName(NegOverflow), ++ FromName(PosOverflow), ++ FromRemap(RangeError(NegOverflow, PosOverflow)) ++ )] ++ pub enum FloatError { ++ /// Input was too negative for the target type. ++ NegOverflow(T), ++ ++ /// Input was too positive for the target type. ++ PosOverflow(T), ++ ++ /// Input was not-a-number, which the target type could not represent. ++ NotANumber(T), ++ } ++} ++ ++custom_derive! { ++ /** ++ Indicates that a conversion failed due to a range error. 
++ */ ++ #[derive( ++ Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ++ IntoInner, DummyDebug, FromNoError, ++ EnumDesc( ++ NegOverflow => "conversion resulted in negative overflow", ++ PosOverflow => "conversion resulted in positive overflow", ++ ), ++ FromName(NegOverflow), ++ FromName(PosOverflow) ++ )] ++ pub enum RangeError { ++ /// Input was too negative for the target type. ++ NegOverflow(T), ++ ++ /// Input was too positive the target type. ++ PosOverflow(T), ++ } ++} ++ ++custom_derive! { ++ /** ++ Indicates that a conversion failed due to a range error. ++ ++ This is a variant of `RangeError` that does not retain the input value which caused the error. It exists to help unify some utility methods and should not generally be used directly, unless you are targeting the `Unwrap*` traits. ++ */ ++ #[derive( ++ Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug, ++ FromNoError, ++ EnumDesc( ++ NegOverflow => "conversion resulted in negative overflow", ++ PosOverflow => "conversion resulted in positive overflow", ++ ), ++ FromName(NegOverflow), ++ FromName(PosOverflow), ++ FromRemap(RangeError(NegOverflow, PosOverflow)) ++ )] ++ pub enum RangeErrorKind { ++ /// Input was too negative for the target type. ++ NegOverflow, ++ ++ /// Input was too positive for the target type. ++ PosOverflow, ++ } ++} ++ ++/** ++Saturates a `Result`. ++*/ ++pub trait Saturate { ++ /// The result of saturating. ++ type Output; ++ ++ /** ++ Replaces an overflow error with a saturated value. ++ ++ Unlike `unwrap_or_saturate`, this method can be used in cases where the `Result` error type can encode failures *other* than overflow and underflow. For example, you cannot saturate a float-to-integer conversion using `unwrap_or_saturate` as the error might be `NotANumber`, which doesn't have a meaningful saturation "direction". ++ ++ The output of this method will be a `Result` where the error type *does not* contain overflow conditions. 
What conditions remain must still be dealt with in some fashion. ++ */ ++ fn saturate(self) -> Self::Output; ++} ++ ++impl Saturate for Result> ++where T: Saturated { ++ type Output = Result>; ++ ++ #[inline] ++ fn saturate(self) -> Self::Output { ++ use self::FloatError::*; ++ match self { ++ Ok(v) => Ok(v), ++ Err(NegOverflow(_)) => Ok(T::saturated_min()), ++ Err(PosOverflow(_)) => Ok(T::saturated_max()), ++ Err(NotANumber(v)) => Err(Unrepresentable(v)) ++ } ++ } ++} ++ ++impl Saturate for Result> ++where T: Saturated { ++ type Output = Result; ++ ++ #[inline] ++ fn saturate(self) -> Self::Output { ++ use self::RangeError::*; ++ match self { ++ Ok(v) => Ok(v), ++ Err(NegOverflow(_)) => Ok(T::saturated_min()), ++ Err(PosOverflow(_)) => Ok(T::saturated_max()) ++ } ++ } ++} ++ ++impl Saturate for Result ++where T: Saturated { ++ type Output = Result; ++ ++ #[inline] ++ fn saturate(self) -> Self::Output { ++ use self::RangeErrorKind::*; ++ match self { ++ Ok(v) => Ok(v), ++ Err(NegOverflow) => Ok(T::saturated_min()), ++ Err(PosOverflow) => Ok(T::saturated_max()) ++ } ++ } ++} ++ ++/** ++Safely unwrap a `Result` that cannot contain an error. ++*/ ++pub trait UnwrapOk { ++ /** ++ Unwraps a `Result` without possibility of failing. ++ ++ Technically, this is not necessary; it's provided simply to make user code a little clearer. ++ */ ++ fn unwrap_ok(self) -> T; ++} ++ ++impl UnwrapOk for Result { ++ #[inline] ++ fn unwrap_ok(self) -> T { ++ match self { ++ Ok(v) => v, ++ Err(no_error) => match no_error {}, ++ } ++ } ++} ++ ++/** ++Unwrap a conversion by saturating to infinity. ++*/ ++pub trait UnwrapOrInf { ++ /// The result of unwrapping. ++ type Output; ++ ++ /** ++ Either unwraps the successfully converted value, or saturates to infinity in the "direction" of overflow. ++ */ ++ fn unwrap_or_inf(self) -> Self::Output; ++} ++ ++/** ++Unwrap a conversion by replacing a failure with an invalid sentinel value. 
++*/ ++pub trait UnwrapOrInvalid { ++ /// The result of unwrapping. ++ type Output; ++ ++ /** ++ Either unwraps the successfully converted value, or returns the output type's invalid sentinel value. ++ */ ++ fn unwrap_or_invalid(self) -> Self::Output; ++} ++ ++/** ++Unwrap a conversion by saturating. ++*/ ++pub trait UnwrapOrSaturate { ++ /// The result of unwrapping. ++ type Output; ++ ++ /** ++ Either unwraps the successfully converted value, or saturates in the "direction" of overflow. ++ */ ++ fn unwrap_or_saturate(self) -> Self::Output; ++} ++ ++impl UnwrapOrInf for Result ++where T: SignedInfinity, E: Into { ++ type Output = T; ++ #[inline] ++ fn unwrap_or_inf(self) -> T { ++ use self::RangeErrorKind::*; ++ match self.map_err(Into::into) { ++ Ok(v) => v, ++ Err(NegOverflow) => T::neg_infinity(), ++ Err(PosOverflow) => T::pos_infinity(), ++ } ++ } ++} ++ ++impl UnwrapOrInvalid for Result ++where T: InvalidSentinel { ++ type Output = T; ++ #[inline] ++ fn unwrap_or_invalid(self) -> T { ++ match self { ++ Ok(v) => v, ++ Err(..) => T::invalid_sentinel(), ++ } ++ } ++} ++ ++impl UnwrapOrSaturate for Result ++where T: Saturated, E: Into { ++ type Output = T; ++ #[inline] ++ fn unwrap_or_saturate(self) -> T { ++ use self::RangeErrorKind::*; ++ match self.map_err(Into::into) { ++ Ok(v) => v, ++ Err(NegOverflow) => T::saturated_min(), ++ Err(PosOverflow) => T::saturated_max(), ++ } ++ } ++} diff --cc vendor/conv-0.3.3/src/impls.rs index 000000000,000000000..424ee8338 new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/src/impls.rs @@@ -1,0 -1,0 +1,591 @@@ ++macro_rules! max_of { ++ ($name:ident) => { ::std::$name::MAX }; ++} ++ ++macro_rules! min_of { ++ ($name:ident) => { ::std::$name::MIN }; ++} ++ ++macro_rules! approx_blind { ++ (($($attrs:tt)*), $src:ty, $dst:ty, $scheme:ty) => { ++ as_item! 
{ ++ $($attrs)* ++ impl ::ApproxFrom<$src, $scheme> for $dst { ++ type Err = ::errors::NoError; ++ #[inline] ++ fn approx_from(src: $src) -> Result<$dst, Self::Err> { ++ Ok(src as $dst) ++ } ++ } ++ } ++ }; ++} ++ ++macro_rules! approx_z_to_dmax { ++ (($($attrs:tt)*), $src:ty, $dst:ident, $scheme:ty) => { ++ as_item! { ++ $($attrs)* ++ impl ::ApproxFrom<$src, $scheme> for $dst { ++ type Err = ::errors::RangeError<$src>; ++ #[inline] ++ fn approx_from(src: $src) -> Result<$dst, Self::Err> { ++ if !(0 <= src) { ++ return Err(::errors::RangeError::NegOverflow(src)); ++ } ++ if !(src <= max_of!($dst) as $src) { ++ return Err(::errors::RangeError::PosOverflow(src)); ++ } ++ Ok(src as $dst) ++ } ++ } ++ } ++ }; ++} ++ ++macro_rules! approx_to_dmax { ++ (($($attrs:tt)*), $src:ty, $dst:ident, $scheme:ty) => { ++ as_item! { ++ $($attrs)* ++ impl ::ApproxFrom<$src, $scheme> for $dst { ++ type Err = ::errors::PosOverflow<$src>; ++ #[inline] ++ fn approx_from(src: $src) -> Result<$dst, Self::Err> { ++ if !(src <= max_of!($dst) as $src) { ++ return Err(::errors::PosOverflow(src)); ++ } ++ Ok(src as $dst) ++ } ++ } ++ } ++ }; ++} ++ ++macro_rules! approx_dmin_to_dmax { ++ (($($attrs:tt)*), $src:ty, $dst:ident, $scheme:ty) => { ++ as_item! { ++ $($attrs)* ++ impl ::ApproxFrom<$src, $scheme> for $dst { ++ type Err = ::errors::RangeError<$src>; ++ #[inline] ++ fn approx_from(src: $src) -> Result<$dst, Self::Err> { ++ if !(min_of!($dst) as $src <= src) { ++ return Err(::errors::RangeError::NegOverflow(src)); ++ } ++ if !(src <= max_of!($dst) as $src) { ++ return Err(::errors::RangeError::PosOverflow(src)); ++ } ++ Ok(src as $dst) ++ } ++ } ++ } ++ } ++} ++ ++macro_rules! approx_z_up { ++ (($($attrs:tt)*), $src:ty, $dst:ident, $scheme:ty) => { ++ as_item! 
{ ++ $($attrs)* ++ impl ::ApproxFrom<$src, $scheme> for $dst { ++ type Err = ::errors::NegOverflow<$src>; ++ #[inline] ++ fn approx_from(src: $src) -> Result<$dst, Self::Err> { ++ if !(0 <= src) { ++ return Err(::errors::NegOverflow(src)); ++ } ++ Ok(src as $dst) ++ } ++ } ++ } ++ }; ++} ++ ++macro_rules! approx_dmin_to_dmax_no_nan { ++ (($($attrs:tt)*), $src:ty, $dst:ident, $scheme:ty) => { ++ approx_dmin_to_dmax_no_nan! { ($($attrs)*), $src, $dst, $scheme, approx: |s| s } ++ }; ++ ++ (($($attrs:tt)*), $src:ty, $dst:ident, $scheme:ty, approx: |$src_name:ident| $conv:expr) => { ++ approx_range_no_nan! { ++ ($($attrs)*), $src, ++ $dst, [min_of!($dst) as $src, max_of!($dst) as $src], ++ $scheme, approx: |$src_name| $conv ++ } ++ }; ++} ++ ++macro_rules! approx_range_no_nan { ++ (($($attrs:tt)*), $src:ty, $dst:ident, [$min:expr, $max:expr], $scheme:ty) => { ++ approx_range_no_nan! { ($($attrs)*), $src, $dst, [$min, $max], $scheme, approx: |s| s } ++ }; ++ ++ (($($attrs:tt)*), $src:ty, $dst:ident, [$min:expr, $max:expr], $scheme:ty, approx: |$src_name:ident| $conv:expr) => { ++ as_item! { ++ $($attrs)* ++ impl ::ApproxFrom<$src, $scheme> for $dst { ++ type Err = ::errors::FloatError<$src>; ++ #[inline] ++ fn approx_from(src: $src) -> Result<$dst, Self::Err> { ++ if src.is_nan() { ++ return Err(::errors::FloatError::NotANumber(src)); ++ } ++ let approx = { let $src_name = src; $conv }; ++ if !($min <= approx) { ++ return Err(::errors::FloatError::NegOverflow(src)); ++ } ++ if !(approx <= $max) { ++ return Err(::errors::FloatError::PosOverflow(src)); ++ } ++ Ok(approx as $dst) ++ } ++ } ++ } ++ }; ++} ++ ++macro_rules! num_conv { ++ (@ $src:ty=> $(,)*) => {}; ++ ++ (@ $src:ty=> #[32] $($tail:tt)*) => { ++ num_conv! { @ $src=> (#[cfg(target_pointer_width="32")]) $($tail)* } ++ }; ++ ++ (@ $src:ty=> #[64] $($tail:tt)*) => { ++ num_conv! { @ $src=> (#[cfg(target_pointer_width="64")]) $($tail)* } ++ }; ++ ++ (@ $src:ty=> e $($tail:tt)*) => { num_conv! 
{ @ $src=> () e $($tail)* } }; ++ (@ $src:ty=> n+ $($tail:tt)*) => { num_conv! { @ $src=> () n+ $($tail)* } }; ++ (@ $src:ty=> n $($tail:tt)*) => { num_conv! { @ $src=> () n $($tail)* } }; ++ (@ $src:ty=> w+ $($tail:tt)*) => { num_conv! { @ $src=> () w+ $($tail)* } }; ++ (@ $src:ty=> w $($tail:tt)*) => { num_conv! { @ $src=> () w $($tail)* } }; ++ (@ $src:ty=> aW $($tail:tt)*) => { num_conv! { @ $src=> () aW $($tail)* } }; ++ (@ $src:ty=> nf $($tail:tt)*) => { num_conv! { @ $src=> () nf $($tail)* } }; ++ (@ $src:ty=> fan $($tail:tt)*) => { num_conv! { @ $src=> () fan $($tail)* } }; ++ ++ // Exact conversion ++ (@ $src:ty=> ($($attrs:tt)*) e $dst:ty, $($tail:tt)*) => { ++ as_item! { ++ approx_blind! { ($($attrs)*), $src, $dst, ::DefaultApprox } ++ approx_blind! { ($($attrs)*), $src, $dst, ::Wrapping } ++ ++ $($attrs)* ++ impl ::ValueFrom<$src> for $dst { ++ type Err = ::errors::NoError; ++ #[inline] ++ fn value_from(src: $src) -> Result<$dst, Self::Err> { ++ Ok(src as $dst) ++ } ++ } ++ } ++ num_conv! { @ $src=> $($tail)* } ++ }; ++ ++ // Narrowing a signed type *into* an unsigned type where the destination type's maximum value is representable by the source type. ++ (@ $src:ty=> ($($attrs:tt)*) n+ $dst:ident, $($tail:tt)*) => { ++ as_item! { ++ approx_z_to_dmax! { ($($attrs)*), $src, $dst, ::DefaultApprox } ++ approx_blind! { ($($attrs)*), $src, $dst, ::Wrapping } ++ ++ $($attrs)* ++ impl ::ValueFrom<$src> for $dst { ++ type Err = ::errors::RangeError<$src>; ++ #[inline] ++ fn value_from(src: $src) -> Result<$dst, Self::Err> { ++ if !(0 <= src) { ++ return Err(::errors::RangeError::NegOverflow(src)); ++ } ++ if !(src <= max_of!($dst) as $src) { ++ return Err(::errors::RangeError::PosOverflow(src)); ++ } ++ Ok(src as $dst) ++ } ++ } ++ } ++ num_conv! { @ $src=> $($tail)* } ++ }; ++ ++ // Narrowing an unsigned type *into* a type where the destination type's maximum value is representable by the source type. 
++ (@ $src:ty=> ($($attrs:tt)*) n- $dst:ident, $($tail:tt)*) => { ++ as_item! { ++ approx_to_dmax! { ($($attrs)*), $src, $dst, ::DefaultApprox } ++ approx_blind! { ($($attrs)*), $src, $dst, ::Wrapping } ++ ++ $($attrs)* ++ impl ::ValueFrom<$src> for $dst { ++ type Err = ::errors::PosOverflow<$src>; ++ #[inline] ++ fn value_from(src: $src) -> Result<$dst, Self::Err> { ++ if !(src <= max_of!($dst) as $src) { ++ return Err(::errors::PosOverflow(src)); ++ } ++ Ok(src as $dst) ++ } ++ } ++ } ++ num_conv! { @ $src=> $($tail)* } ++ }; ++ ++ // Narrowing where the destination type's bounds are representable by the source type. ++ (@ $src:ty=> ($($attrs:tt)*) n $dst:ident, $($tail:tt)*) => { ++ as_item! { ++ approx_dmin_to_dmax! { ($($attrs)*), $src, $dst, ::DefaultApprox } ++ approx_blind! { ($($attrs)*), $src, $dst, ::Wrapping } ++ ++ $($attrs)* ++ impl ::ValueFrom<$src> for $dst { ++ type Err = ::errors::RangeError<$src>; ++ #[inline] ++ fn value_from(src: $src) -> Result<$dst, Self::Err> { ++ if !(min_of!($dst) as $src <= src) { ++ return Err(::errors::RangeError::NegOverflow(src)); ++ } ++ if !(src <= max_of!($dst) as $src) { ++ return Err(::errors::RangeError::PosOverflow(src)); ++ } ++ Ok(src as $dst) ++ } ++ } ++ } ++ num_conv! { @ $src=> $($tail)* } ++ }; ++ ++ // Widening a signed type *into* an unsigned type. ++ (@ $src:ty=> ($($attrs:tt)*) w+ $dst:ident, $($tail:tt)*) => { ++ as_item! { ++ approx_z_up! { ($($attrs)*), $src, $dst, ::DefaultApprox } ++ approx_blind! { ($($attrs)*), $src, $dst, ::Wrapping } ++ ++ $($attrs)* ++ impl ::ValueFrom<$src> for $dst { ++ type Err = ::errors::NegOverflow<$src>; ++ #[inline] ++ fn value_from(src: $src) -> Result<$dst, Self::Err> { ++ if !(0 <= src) { ++ return Err(::errors::NegOverflow(src)); ++ } ++ Ok(src as $dst) ++ } ++ } ++ } ++ num_conv! { @ $src=> $($tail)* } ++ }; ++ ++ // Widening. ++ (@ $src:ty=> ($($attrs:tt)*) w $dst:ident, $($tail:tt)*) => { ++ as_item! { ++ approx_blind! 
{ ($($attrs)*), $src, $dst, ::DefaultApprox } ++ approx_blind! { ($($attrs)*), $src, $dst, ::Wrapping } ++ ++ $($attrs)* ++ impl ::ValueFrom<$src> for $dst { ++ type Err = ::errors::NoError; ++ #[inline] ++ fn value_from(src: $src) -> Result<$dst, Self::Err> { ++ Ok(src as $dst) ++ } ++ } ++ } ++ num_conv! { @ $src=> $($tail)* } ++ }; ++ ++ // Narrowing *into* a floating-point type where the conversion is only exact within a given range. ++ (@ $src:ty=> ($($attrs:tt)*) nf [+- $bound:expr] $dst:ident, $($tail:tt)*) => { ++ as_item! { ++ approx_blind! { ($($attrs)*), $src, $dst, ::DefaultApprox } ++ ++ $($attrs)* ++ impl ::ValueFrom<$src> for $dst { ++ type Err = ::errors::RangeError<$src>; ++ #[inline] ++ fn value_from(src: $src) -> Result<$dst, Self::Err> { ++ if !(-$bound <= src) { ++ return Err(::errors::RangeError::NegOverflow(src)); ++ } ++ if !(src <= $bound) { ++ return Err(::errors::RangeError::PosOverflow(src)); ++ } ++ Ok(src as $dst) ++ } ++ } ++ } ++ num_conv! { @ $src=> $($tail)* } ++ }; ++ ++ (@ $src:ty=> ($($attrs:tt)*) nf [, $max:expr] $dst:ident, $($tail:tt)*) => { ++ as_item! { ++ approx_blind! { ($($attrs)*), $src, $dst, ::DefaultApprox } ++ ++ $($attrs)* ++ impl ::ValueFrom<$src> for $dst { ++ type Err = ::errors::PosOverflow<$src>; ++ #[inline] ++ fn value_from(src: $src) -> Result<$dst, Self::Err> { ++ if !(src <= $max) { ++ return Err(::errors::PosOverflow(src)); ++ } ++ Ok(src as $dst) ++ } ++ } ++ } ++ num_conv! { @ $src=> $($tail)* } ++ }; ++ ++ // Approximately narrowing a floating point value *into* a type where the source value is constrained by the given range of values. ++ (@ $src:ty=> ($($attrs:tt)*) fan [$min:expr, $max:expr] $dst:ident, $($tail:tt)*) => { ++ as_item! { ++ approx_range_no_nan! { ($($attrs)*), $src, $dst, [$min, $max], ++ ::DefaultApprox } ++ approx_range_no_nan! { ($($attrs)*), $src, $dst, [$min, $max], ++ ::RoundToNearest, approx: |s| s.round() } ++ approx_range_no_nan! 
{ ($($attrs)*), $src, $dst, [$min, $max], ++ ::RoundToNegInf, approx: |s| s.floor() } ++ approx_range_no_nan! { ($($attrs)*), $src, $dst, [$min, $max], ++ ::RoundToPosInf, approx: |s| s.ceil() } ++ approx_range_no_nan! { ($($attrs)*), $src, $dst, [$min, $max], ++ ::RoundToZero, approx: |s| s.trunc() } ++ } ++ num_conv! { @ $src=> $($tail)* } ++ }; ++ ++ (@ $src:ty=> ($($attrs:tt)*) fan $dst:ident, $($tail:tt)*) => { ++ as_item! { ++ approx_dmin_to_dmax_no_nan! { ($($attrs)*), $src, $dst, ::DefaultApprox } ++ approx_dmin_to_dmax_no_nan! { ($($attrs)*), $src, $dst, ::RoundToNearest, ++ approx: |s| s.round() } ++ approx_dmin_to_dmax_no_nan! { ($($attrs)*), $src, $dst, ::RoundToNegInf, ++ approx: |s| s.floor() } ++ approx_dmin_to_dmax_no_nan! { ($($attrs)*), $src, $dst, ::RoundToPosInf, ++ approx: |s| s.ceil() } ++ approx_dmin_to_dmax_no_nan! { ($($attrs)*), $src, $dst, ::RoundToZero, ++ approx: |s| s.trunc() } ++ } ++ num_conv! { @ $src=> $($tail)* } ++ }; ++ ++ ($src:ty=> $($tail:tt)*) => { ++ num_conv! { @ $src=> $($tail)*, } ++ }; ++} ++ ++mod lang_ints { ++ num_conv! { i8=> w i16, w i32, w i64, w+u8, w+u16, w+u32, w+u64, w isize, w+usize } ++ num_conv! { i16=> n i8, w i32, w i64, n+u8, w+u16, w+u32, w+u64, w isize, w+usize } ++ num_conv! { i32=> n i8, n i16, w i64, n+u8, n+u16, w+u32, w+u64 } ++ num_conv! { i64=> n i8, n i16, n i32, n+u8, n+u16, n+u32, w+u64 } ++ num_conv! { i32=> #[32] e isize, #[64] w isize, w+usize } ++ num_conv! { i64=> #[32] n isize, #[64] e isize, #[32] n+usize, #[64] w+usize } ++ ++ num_conv! { u8=> n-i8, w i16, w i32, w i64, w u16, w u32, w u64, w isize, w usize } ++ num_conv! { u16=> n-i8, n-i16, w i32, w i64, n-u8, w u32, w u64, w isize, w usize } ++ num_conv! { u32=> n-i8, n-i16, n-i32, w i64, n-u8, n-u16, w u64 } ++ num_conv! { u64=> n-i8, n-i16, n-i32, n-i64, n-u8, n-u16, n-u32 } ++ num_conv! { u32=> #[32] n-isize, #[64] w isize, #[32] e usize, #[64] w usize } ++ num_conv! 
{ u64=> n-isize, #[32] n-usize, #[64] e usize } ++ ++ num_conv! { isize=> n i8, n i16, #[32] e i32, #[32] w i64, #[64] n i32, #[64] e i64 } ++ num_conv! { isize=> n+u8, n+u16, #[32] w+u32, #[32] w+u64, #[64] n+u32, #[64] w+u64 } ++ num_conv! { isize=> w+usize } ++ ++ num_conv! { usize=> n-i8, n-i16, #[32] n-i32, #[32] w i64, #[64] n-i32, #[64] n-i64 } ++ num_conv! { usize=> n-u8, n-u16, #[32] e u32, #[32] w u64, #[64] n-u32, #[64] e u64 } ++ num_conv! { usize=> n-isize } ++} ++ ++mod lang_floats { ++ use {ApproxFrom, ApproxScheme}; ++ use ValueFrom; ++ use errors::{NoError, RangeError}; ++ ++ // f32 -> f64: strictly widening ++ impl ApproxFrom for f64 ++ where Scheme: ApproxScheme { ++ type Err = NoError; ++ #[inline] ++ fn approx_from(src: f32) -> Result { ++ Ok(src as f64) ++ } ++ } ++ ++ impl ValueFrom for f64 { ++ type Err = NoError; ++ #[inline] ++ fn value_from(src: f32) -> Result { ++ Ok(src as f64) ++ } ++ } ++ ++ // f64 -> f32: narrowing, approximate ++ impl ApproxFrom for f32 { ++ type Err = RangeError; ++ #[inline] ++ fn approx_from(src: f64) -> Result { ++ if !src.is_finite() { ++ return Ok(src as f32); ++ } ++ if !(::std::f32::MIN as f64 <= src) { ++ return Err(RangeError::NegOverflow(src)); ++ } ++ if !(src <= ::std::f32::MAX as f64) { ++ return Err(RangeError::PosOverflow(src)); ++ } ++ Ok(src as f32) ++ } ++ } ++} ++ ++mod lang_int_to_float { ++ num_conv! { i8=> w f32, w f64 } ++ num_conv! { i16=> w f32, w f64 } ++ num_conv! { i32=> nf [+- 16_777_216] f32, w f64 } ++ num_conv! { i64=> nf [+- 16_777_216] f32, nf [+- 9_007_199_254_740_992] f64 } ++ ++ num_conv! { u8=> w f32, w f64 } ++ num_conv! { u16=> w f32, w f64 } ++ num_conv! { u32=> nf [, 16_777_216] f32, w f64 } ++ num_conv! { u64=> nf [, 16_777_216] f32, nf [, 9_007_199_254_740_992] f64 } ++ ++ num_conv! { isize=> nf [+- 16_777_216] f32, ++ #[32] w f64, #[64] nf [+- 9_007_199_254_740_992] f64 } ++ num_conv! 
{ usize=> nf [, 16_777_216] f32, ++ #[32] w f64, #[64] nf [, 9_007_199_254_740_992] f64 } ++} ++ ++mod lang_float_to_int { ++ /* ++ We use explicit ranges on narrowing float-to-int conversions because it *turns out* that just because you can cast an integer to a float, this *does not* mean you can cast it back and get the original input. The non-explicit-range implementation of `fan` *depends* on this, so it was kinda *totally broken* for narrowing conversions. ++ ++ *Yeah.* That's floating point for you! ++ */ ++ num_conv! { f32=> fan i8, fan i16, ++ fan [-2.1474836e9, 2.1474835e9] i32, ++ fan [-9.223372e18, 9.2233715e18] i64 } ++ num_conv! { f32=> fan u8, fan u16, ++ fan [0.0, 4.294967e9] u32, ++ fan [0.0, 1.8446743e19] u64 } ++ num_conv! { f32=> ++ #[32] fan [-2.1474836e9, 2.1474835e9] isize, ++ #[32] fan [0.0, 4.294967e9] usize, ++ #[64] fan [-9.223372e18, 9.2233715e18] isize, ++ #[64] fan [0.0, 1.8446743e19] usize } ++ ++ num_conv! { f64=> fan i8, fan i16, fan i32, ++ fan [-9.223372036854776e18, 9.223372036854775e18] i64 } ++ num_conv! { f64=> fan u8, fan u16, fan u32, ++ fan [0.0, 1.844674407370955e19] u64 } ++ num_conv! { f64=> ++ #[32] fan isize, #[32] fan usize, ++ #[64] fan [-9.223372036854776e18, 9.223372036854775e18] isize, ++ #[64] fan [0.0, 1.844674407370955e19] usize } ++} ++ ++mod lang_char_to_int { ++ use TryFrom; ++ use ValueFrom; ++ use errors::{NoError, PosOverflow}; ++ ++ impl TryFrom for u32 { ++ type Err = NoError; ++ #[inline] ++ fn try_from(src: char) -> Result { ++ Ok(src as u32) ++ } ++ } ++ ++ impl TryFrom for usize { ++ type Err = NoError; ++ #[inline] ++ fn try_from(src: char) -> Result { ++ Ok(src as usize) ++ } ++ } ++ ++ impl TryFrom for isize { ++ type Err = NoError; ++ #[inline] ++ fn try_from(src: char) -> Result { ++ Ok(src as isize) ++ } ++ } ++ ++ macro_rules! 
conv_char_to_int { ++ ($($ts:ty),* $(,)*) => { ++ $( ++ impl TryFrom for $ts { ++ type Err = PosOverflow; ++ #[inline] ++ fn try_from(src: char) -> Result<$ts, Self::Err> { ++ <$ts as ValueFrom<_>>::value_from(src as u32) ++ .map_err(|_| PosOverflow(src)) ++ } ++ } ++ )* ++ }; ++ } ++ ++ macro_rules! conv_char_to_int_wide { ++ ($($ts:ty),* $(,)*) => { ++ $( ++ impl TryFrom for $ts { ++ type Err = NoError; ++ #[inline] ++ fn try_from(src: char) -> Result<$ts, Self::Err> { ++ <$ts as ValueFrom<_>>::value_from(src as u32) ++ } ++ } ++ )* ++ }; ++ } ++ ++ conv_char_to_int! { i8, i16, i32, u8, u16 } ++ conv_char_to_int_wide! { i64, u64 } ++} ++ ++mod lang_int_to_char { ++ use TryFrom; ++ use ValueFrom; ++ use errors::{NoError, Unrepresentable, UnwrapOk}; ++ ++ impl TryFrom for char { ++ type Err = NoError; ++ #[inline] ++ fn try_from(src: u8) -> Result { ++ Ok(src as char) ++ } ++ } ++ impl TryFrom for char { ++ type Err = Unrepresentable; ++ #[inline] ++ fn try_from(src: u16) -> Result { ++ TryFrom::try_from( ++ >::value_from(src).unwrap_ok() ++ ).map_err(|_| Unrepresentable(src)) ++ } ++ } ++ ++ impl TryFrom for char { ++ type Err = Unrepresentable; ++ #[inline] ++ fn try_from(src: u32) -> Result { ++ ::std::char::from_u32(src).ok_or_else(|| Unrepresentable(src)) ++ } ++ } ++ ++ macro_rules! conv_int_to_char { ++ ($($ts:ty),* $(,)*) => { ++ $( ++ impl TryFrom<$ts> for char { ++ type Err = Unrepresentable<$ts>; ++ #[inline] ++ fn try_from(src: $ts) -> Result { ++ >::value_from(src) ++ .map_err(|_| Unrepresentable(src)) ++ .and_then(|usv| TryFrom::try_from(usv) ++ .map_err(|_| Unrepresentable(src))) ++ } ++ } ++ )* ++ }; ++ } ++ ++ conv_int_to_char! { i8, i16, i32, i64, isize, u64, usize } ++} diff --cc vendor/conv-0.3.3/src/lib.rs index 000000000,000000000..195b8d10f new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/src/lib.rs @@@ -1,0 -1,0 +1,525 @@@ ++/*! 
++This crate provides a number of conversion traits with more specific semantics than those provided by `as` or `From`/`Into`. ++ ++The goal with the traits provided here is to be more specific about what generic code can rely on, as well as provide reasonably self-describing alternatives to the standard `From`/`Into` traits. For example, the although `T: From` might be satisfied, it imposes no restrictions on the *kind* of conversion being implemented. As such, the traits in this crate try to be very specific about what conversions are allowed. This makes them less generally applicable, but more useful where they *do* apply. ++ ++In addition, `From`/`Into` requires all conversions to succeed or panic. All conversion traits in this crate define an associated error type, allowing code to react to failed conversions as appropriate. ++ ++ ++ ++ ++## Compatibility ++ ++`conv` is compatible with Rust 1.2 and higher. ++ ++## Change Log ++ ++### v0.3.2 ++ ++- Added integer ↔ `char` conversions. ++- Added missing `isize`/`usize` → `f32`/`f64` conversions. ++- Fixed the error type of `i64` → `usize` for 64-bit targets. ++ ++### v0.3.1 ++ ++- Change to `unwrap_ok` for better codegen (thanks bluss). ++- Fix for Rust breaking change (code in question was dodgy anyway; thanks m4rw3r). ++ ++### v0.3.0 ++ ++- Added an `Error` constraint to all `Err` associated types. This will break any user-defined conversions where the `Err` type does not implement `Error`. ++- Renamed the `Overflow` and `Underflow` errors to `PosOverflow` and `NegOverflow` respectively. In the context of floating point conversions, "underflow" usually means the value was too close to zero to correctly represent. ++ ++### v0.2.1 ++ ++- Added `ConvUtil::into_as` as a shortcut for `Into::::into`. ++- Added `#[inline]` attributes. ++- Added `Saturate::saturate`, which can saturate `Result`s arising from over/underflow. ++ ++### v0.2.0 ++ ++- Changed all error types to include the original input as payload. 
This breaks pretty much *everything*. Sorry about that. On the bright side, there's now no downside to using the conversion traits for non-`Copy` types. ++- Added the normal rounding modes for float → int approximations: `RoundToNearest`, `RoundToNegInf`, `RoundToPosInf`, and `RoundToZero`. ++- `ApproxWith` is now subsumed by a pair of extension traits (`ConvUtil` and `ConvAsUtil`), that also have shortcuts for `TryInto` and `ValueInto` so that you can specify the destination type on the method. ++ ++# Overview ++ ++The following traits are used to define various conversion semantics: ++ ++- [`ApproxFrom`](./trait.ApproxFrom.html)/[`ApproxInto`](./trait.ApproxInto.html) - approximate conversions, with selectable approximation scheme (see [`ApproxScheme`](./trait.ApproxScheme.html)). ++- [`TryFrom`](./trait.TryFrom.html)/[`TryInto`](./trait.TryInto.html) - general, potentially failing value conversions. ++- [`ValueFrom`](./trait.ValueFrom.html)/[`ValueInto`](./trait.ValueInto.html) - exact, value-preserving conversions. ++ ++When *defining* a conversion, try to implement the `*From` trait variant where possible. When *using* a conversion, try to depend on the `*Into` trait variant where possible. This is because the `*Into` traits automatically use `*From` implementations, but not the reverse. Implementing `*From` and using `*Into` ensures conversions work in as many contexts as possible. ++ ++These extension methods are provided to help with some common cases: ++ ++- [`ConvUtil::approx_as`](./trait.ConvUtil.html#method.approx_as) - approximates to `Dst` with the `DefaultApprox` scheme. ++- [`ConvUtil::approx_as_by`](./trait.ConvUtil.html#method.approx_as_by) - approximates to `Dst` with the scheme `S`. ++- [`ConvUtil::into_as`](./trait.ConvUtil.html#method.into_as) - converts to `Dst` using `Into::into`. ++- [`ConvUtil::try_as`](./trait.ConvUtil.html#method.try_as) - converts to `Dst` using `TryInto::try_into`. 
++- [`ConvUtil::value_as`](./trait.ConvUtil.html#method.value_as) - converts to `Dst` using `ValueInto::value_into`. ++- [`ConvAsUtil::approx`](./trait.ConvAsUtil.html#method.approx) - approximates to an inferred destination type with the `DefaultApprox` scheme. ++- [`ConvAsUtil::approx_by`](./trait.ConvAsUtil.html#method.approx_by) - approximates to an inferred destination type with the scheme `S`. ++- [`Saturate::saturate`](./errors/trait.Saturate.html#tymethod.saturate) - saturates on overflow. ++- [`UnwrapOk::unwrap_ok`](./errors/trait.UnwrapOk.html#tymethod.unwrap_ok) - unwraps results from conversions that cannot fail. ++- [`UnwrapOrInf::unwrap_or_inf`](./errors/trait.UnwrapOrInf.html#tymethod.unwrap_or_inf) - saturates to ±∞ on failure. ++- [`UnwrapOrInvalid::unwrap_or_invalid`](./errors/trait.UnwrapOrInvalid.html#tymethod.unwrap_or_invalid) - substitutes the target type's "invalid" sentinel value on failure. ++- [`UnwrapOrSaturate::unwrap_or_saturate`](./errors/trait.UnwrapOrSaturate.html#tymethod.unwrap_or_saturate) - saturates to the maximum or minimum value of the target type on failure. ++ ++A macro is provided to assist in implementing conversions: ++ ++- [`TryFrom!`](./macros/index.html#tryfrom!) - derives an implementation of [`TryFrom`](./trait.TryFrom.html). ++ ++If you are implementing your own types, you may also be interested in the traits contained in the [`misc`](./misc/index.html) module. ++ ++## Provided Implementations ++ ++The crate provides several blanket implementations: ++ ++- `*From for A` (all types can be converted from and into themselves). ++- `*Into for Src where Dst: *From` (`*From` implementations imply a matching `*Into` implementation). ++ ++Conversions for the builtin numeric (integer and floating point) types are provided. In general, `ValueFrom` conversions exist for all pairs except for float → integer (since such a conversion is generally unlikely to *exactly* succeed) and `f64 → f32` (for the same reason). 
`ApproxFrom` conversions with the `DefaultApprox` scheme exist between all pairs. `ApproxFrom` with the `Wrapping` scheme exist between integers. ++ ++## Errors ++ ++A number of error types are defined in the [`errors`](./errors/index.html) module. Generally, conversions use whichever error type most *narrowly* defines the kinds of failures that can occur. For example: ++ ++- `ValueFrom for u16` cannot possibly fail, and as such it uses `NoError`. ++- `ValueFrom for u16` can *only* fail with a negative overflow, thus it uses the `NegOverflow` type. ++- `ValueFrom for u16` can overflow in either direction, hence it uses `RangeError`. ++- Finally, `ApproxFrom for u16` can overflow (positive or negative), or attempt to convert NaN; `FloatError` covers those three cases. ++ ++Because there are *numerous* error types, the `GeneralError` enum is provided. `From for GeneralError` exists for each error type `E` defined by this crate (even for `NoError`!), allowing errors to be translated automatically by `try!`. In fact, all errors can be "expanded" to *all* more general forms (*e.g.* `NoError` → `NegOverflow`, `PosOverflow` → `RangeError` → `FloatError`). ++ ++Aside from `NoError`, the various error types wrap the input value that you attempted to convert. This is so that non-`Copy` types do not need to be pre-emptively cloned prior to conversion, just in case the conversion fails. A downside is that this means there are many, *many* incompatible error types. ++ ++To help alleviate this, there is also `GeneralErrorKind`, which is simply `GeneralError` without the payload, and all errors can be converted into it directly. ++ ++The reason for not just using `GeneralErrorKind` in the first place is to statically reduce the number of potential error cases you need to deal with. 
It also allows the `Unwrap*` extension traits to be defined *without* the possibility for runtime failure (*e.g.* you cannot use `unwrap_or_saturate` with a `FloatError`, because what do you do if the error is `NotANumber`; saturate to max or to min? Or panic?). ++ ++# Examples ++ ++``` ++# extern crate conv; ++# use conv::*; ++# fn main() { ++// This *cannot* fail, so we can use `unwrap_ok` to discard the `Result`. ++assert_eq!(u8::value_from(0u8).unwrap_ok(), 0u8); ++ ++// This *can* fail. Specifically, it can overflow toward negative infinity. ++assert_eq!(u8::value_from(0i8), Ok(0u8)); ++assert_eq!(u8::value_from(-1i8), Err(NegOverflow(-1))); ++ ++// This can overflow in *either* direction; hence the change to `RangeError`. ++assert_eq!(u8::value_from(-1i16), Err(RangeError::NegOverflow(-1))); ++assert_eq!(u8::value_from(0i16), Ok(0u8)); ++assert_eq!(u8::value_from(256i16), Err(RangeError::PosOverflow(256))); ++ ++// We can use the extension traits to simplify this a little. ++assert_eq!(u8::value_from(-1i16).unwrap_or_saturate(), 0u8); ++assert_eq!(u8::value_from(0i16).unwrap_or_saturate(), 0u8); ++assert_eq!(u8::value_from(256i16).unwrap_or_saturate(), 255u8); ++ ++// Obviously, all integers can be "approximated" using the default scheme (it ++// doesn't *do* anything), but they can *also* be approximated with the ++// `Wrapping` scheme. ++assert_eq!( ++ >::approx_from(400u16), ++ Err(PosOverflow(400))); ++assert_eq!( ++ >::approx_from(400u16), ++ Ok(144u8)); ++ ++// This is rather inconvenient; as such, there are a number of convenience ++// extension methods available via `ConvUtil` and `ConvAsUtil`. ++assert_eq!(400u16.approx(), Err::(PosOverflow(400))); ++assert_eq!(400u16.approx_by::(), Ok::(144u8)); ++assert_eq!(400u16.approx_as::(), Err(PosOverflow(400))); ++assert_eq!(400u16.approx_as_by::(), Ok(144)); ++ ++// Integer -> float conversions *can* fail due to limited precision. 
++// Once the continuous range of exactly representable integers is exceeded, the ++// provided implementations fail with overflow errors. ++assert_eq!(f32::value_from(16_777_216i32), Ok(16_777_216.0f32)); ++assert_eq!(f32::value_from(16_777_217i32), Err(RangeError::PosOverflow(16_777_217))); ++ ++// Float -> integer conversions have to be done using approximations. Although ++// exact conversions are *possible*, "advertising" this with an implementation ++// is misleading. ++// ++// Note that `DefaultApprox` for float -> integer uses whatever rounding ++// mode is currently active (*i.e.* whatever `as` would do). ++assert_eq!(41.0f32.approx(), Ok(41u8)); ++assert_eq!(41.3f32.approx(), Ok(41u8)); ++assert_eq!(41.5f32.approx(), Ok(41u8)); ++assert_eq!(41.8f32.approx(), Ok(41u8)); ++assert_eq!(42.0f32.approx(), Ok(42u8)); ++ ++assert_eq!(255.0f32.approx(), Ok(255u8)); ++assert_eq!(256.0f32.approx(), Err::(FloatError::PosOverflow(256.0))); ++ ++// Sometimes, it can be useful to saturate the conversion from float to ++// integer directly, then account for NaN as input separately. The `Saturate` ++// extension trait exists for this reason. ++assert_eq!((-23.0f32).approx_as::().saturate(), Ok(0)); ++assert_eq!(302.0f32.approx_as::().saturate(), Ok(255u8)); ++assert!(std::f32::NAN.approx_as::().saturate().is_err()); ++ ++// If you really don't care about the specific kind of error, you can just rely ++// on automatic conversion to `GeneralErrorKind`. ++fn too_many_errors() -> Result<(), GeneralErrorKind> { ++ assert_eq!({let r: u8 = try!(0u8.value_into()); r}, 0u8); ++ assert_eq!({let r: u8 = try!(0i8.value_into()); r}, 0u8); ++ assert_eq!({let r: u8 = try!(0i16.value_into()); r}, 0u8); ++ assert_eq!({let r: u8 = try!(0.0f32.approx()); r}, 0u8); ++ Ok(()) ++} ++# let _ = too_many_errors(); ++# } ++``` ++ ++*/ ++ ++#![deny(missing_docs)] ++ ++#[macro_use] extern crate custom_derive; ++ ++// Exported macros. 
++pub mod macros; ++ ++pub use errors::{ ++ NoError, GeneralError, GeneralErrorKind, Unrepresentable, ++ NegOverflow, PosOverflow, ++ FloatError, RangeError, RangeErrorKind, ++ Saturate, ++ UnwrapOk, UnwrapOrInf, UnwrapOrInvalid, UnwrapOrSaturate, ++}; ++ ++use std::error::Error; ++ ++/** ++Publicly re-exports the most generally useful set of items. ++ ++Usage of the prelude should be considered **unstable**. Although items will likely *not* be removed without bumping the major version, new items *may* be added, which could potentially cause name conflicts in user code. ++*/ ++pub mod prelude { ++ pub use super::{ ++ ApproxFrom, ApproxInto, ++ ValueFrom, ValueInto, ++ GeneralError, GeneralErrorKind, ++ Saturate, ++ UnwrapOk, UnwrapOrInf, UnwrapOrInvalid, UnwrapOrSaturate, ++ ConvUtil, ConvAsUtil, ++ RoundToNearest, RoundToZero, Wrapping, ++ }; ++} ++ ++macro_rules! as_item { ++ ($($i:item)*) => {$($i)*}; ++} ++ ++macro_rules! item_for_each { ++ ( ++ $( ($($arg:tt)*) ),* $(,)* => { $($exp:tt)* } ++ ) => { ++ macro_rules! body { ++ $($exp)* ++ } ++ ++ $( ++ body! { $($arg)* } ++ )* ++ }; ++} ++ ++pub mod errors; ++pub mod misc; ++ ++mod impls; ++ ++/** ++This trait is used to perform a conversion that is permitted to approximate the result, but *not* to wrap or saturate the result to fit into the destination type's representable range. ++ ++Where possible, prefer *implementing* this trait over `ApproxInto`, but prefer *using* `ApproxInto` for generic constraints. ++ ++# Details ++ ++All implementations of this trait must provide a conversion that can be separated into two logical steps: an approximation transform, and a representation transform. ++ ++The "approximation transform" step involves transforming the input value into an approximately equivalent value which is supported by the target type *without* taking the target type's representable range into account. 
For example, this might involve rounding or truncating a floating point value to an integer, or reducing the accuracy of a floating point value. ++ ++The "representation transform" step *exactly* rewrites the value from the source type's binary representation into the destination type's binary representation. This step *may not* transform the value in any way. If the result of the approximation is not representable, the conversion *must* fail. ++ ++The major reason for this formulation is to exactly define what happens when converting between floating point and integer types. Often, it is unclear what happens to floating point values beyond the range of the target integer type. Do they saturate, wrap, or cause a failure? ++ ++With this formulation, it is well-defined: if a floating point value is outside the representable range, the conversion fails. This allows users to distinguish between approximation and range violation, and act accordingly. ++*/ ++pub trait ApproxFrom: Sized where Scheme: ApproxScheme { ++ /// The error type produced by a failed conversion. ++ type Err: Error; ++ ++ /// Convert the given value into an approximately equivalent representation. ++ fn approx_from(src: Src) -> Result; ++} ++ ++impl ApproxFrom for Src where Scheme: ApproxScheme { ++ type Err = NoError; ++ fn approx_from(src: Src) -> Result { ++ Ok(src) ++ } ++} ++ ++/** ++This is the dual of `ApproxFrom`; see that trait for information. ++ ++Where possible, prefer *using* this trait over `ApproxFrom` for generic constraints, but prefer *implementing* `ApproxFrom`. ++*/ ++pub trait ApproxInto where Scheme: ApproxScheme { ++ /// The error type produced by a failed conversion. ++ type Err: Error; ++ ++ /// Convert the subject into an approximately equivalent representation. 
++ fn approx_into(self) -> Result; ++} ++ ++impl ApproxInto for Src ++where ++ Dst: ApproxFrom, ++ Scheme: ApproxScheme, ++{ ++ type Err = Dst::Err; ++ fn approx_into(self) -> Result { ++ ApproxFrom::approx_from(self) ++ } ++} ++ ++/** ++This trait is used to mark approximation scheme types. ++*/ ++pub trait ApproxScheme {} ++ ++/** ++The "default" approximation scheme. This scheme does whatever would generally be expected of a lossy conversion, assuming no additional context or instruction is given. ++ ++This is a double-edged sword: it has the loosest semantics, but is far more likely to exist than more complicated approximation schemes. ++*/ ++pub enum DefaultApprox {} ++impl ApproxScheme for DefaultApprox {} ++ ++/** ++This scheme is used to convert a value by "wrapping" it into a narrower range. ++ ++In abstract, this can be viewed as the opposite of rounding: rather than preserving the most significant bits of a value, it preserves the *least* significant bits of a value. ++*/ ++pub enum Wrapping {} ++impl ApproxScheme for Wrapping {} ++ ++/** ++This scheme is used to convert a value by rounding it to the nearest representable value, with ties rounding away from zero. ++*/ ++pub enum RoundToNearest {} ++impl ApproxScheme for RoundToNearest {} ++ ++/** ++This scheme is used to convert a value by rounding it toward negative infinity to the nearest representable value. ++*/ ++pub enum RoundToNegInf {} ++impl ApproxScheme for RoundToNegInf {} ++ ++/** ++This scheme is used to convert a value by rounding it toward positive infinity to the nearest representable value. ++*/ ++pub enum RoundToPosInf {} ++impl ApproxScheme for RoundToPosInf {} ++ ++/** ++This scheme is used to convert a value by rounding it toward zero to the nearest representable value. ++*/ ++pub enum RoundToZero {} ++impl ApproxScheme for RoundToZero {} ++ ++/** ++This trait is used to perform a conversion between different semantic types which might fail. 
++ ++Where possible, prefer *implementing* this trait over `TryInto`, but prefer *using* `TryInto` for generic constraints. ++ ++# Details ++ ++Typically, this should be used in cases where you are converting between values whose ranges and/or representations only partially overlap. That the conversion may fail should be a reasonably expected outcome. A standard example of this is converting from integers to enums of unitary variants. ++*/ ++pub trait TryFrom: Sized { ++ /// The error type produced by a failed conversion. ++ type Err: Error; ++ ++ /// Convert the given value into the subject type. ++ fn try_from(src: Src) -> Result; ++} ++ ++impl TryFrom for Src { ++ type Err = NoError; ++ fn try_from(src: Src) -> Result { ++ Ok(src) ++ } ++} ++ ++/** ++This is the dual of `TryFrom`; see that trait for information. ++ ++Where possible, prefer *using* this trait over `TryFrom` for generic constraints, but prefer *implementing* `TryFrom`. ++*/ ++pub trait TryInto { ++ /// The error type produced by a failed conversion. ++ type Err: Error; ++ ++ /// Convert the subject into the destination type. ++ fn try_into(self) -> Result; ++} ++ ++impl TryInto for Src where Dst: TryFrom { ++ type Err = Dst::Err; ++ fn try_into(self) -> Result { ++ TryFrom::try_from(self) ++ } ++} ++ ++/** ++This trait is used to perform an exact, value-preserving conversion. ++ ++Where possible, prefer *implementing* this trait over `ValueInto`, but prefer *using* `ValueInto` for generic constraints. ++ ++# Details ++ ++Implementations of this trait should be reflexive, associative and commutative (in the absence of conversion errors). That is, all possible cycles of `ValueFrom` conversions (for which each "step" has a defined implementation) should produce the same result, with a given value either being "round-tripped" exactly, or an error being produced. ++*/ ++pub trait ValueFrom: Sized { ++ /// The error type produced by a failed conversion. 
++ type Err: Error; ++ ++ /// Convert the given value into an exactly equivalent representation. ++ fn value_from(src: Src) -> Result; ++} ++ ++impl ValueFrom for Src { ++ type Err = NoError; ++ fn value_from(src: Src) -> Result { ++ Ok(src) ++ } ++} ++ ++/** ++This is the dual of `ValueFrom`; see that trait for information. ++ ++Where possible, prefer *using* this trait over `ValueFrom` for generic constraints, but prefer *implementing* `ValueFrom`. ++*/ ++pub trait ValueInto { ++ /// The error type produced by a failed conversion. ++ type Err: Error; ++ ++ /// Convert the subject into an exactly equivalent representation. ++ fn value_into(self) -> Result; ++} ++ ++impl ValueInto for Src where Dst: ValueFrom { ++ type Err = Dst::Err; ++ fn value_into(self) -> Result { ++ ValueFrom::value_from(self) ++ } ++} ++ ++/** ++This extension trait exists to simplify using various conversions. ++ ++If there is more than one implementation for a given type/trait pair, a simple call to `*_into` may not be uniquely resolvable. Due to the position of the type parameter (on the trait itself), it is cumbersome to specify the destination type. A similar problem exists for approximation schemes. ++ ++See also the [`ConvAsUtil`](./trait.ConvAsUtil.html) trait. ++ ++> **Note**: There appears to be a bug in `rustdoc`'s output. This trait is implemented *for all* types, though the methods are only available for types where the appropriate conversions are defined. ++*/ ++pub trait ConvUtil { ++ /// Approximate the subject to a given type with the default scheme. ++ fn approx_as(self) -> Result ++ where Self: Sized + ApproxInto { ++ self.approx_into() ++ } ++ ++ /// Approximate the subject to a given type with a specific scheme. ++ fn approx_as_by(self) -> Result ++ where ++ Self: Sized + ApproxInto, ++ Scheme: ApproxScheme, ++ { ++ self.approx_into() ++ } ++ ++ /// Convert the subject to a given type. 
++ fn into_as(self) -> Dst ++ where Self: Sized + Into { ++ self.into() ++ } ++ ++ /// Attempt to convert the subject to a given type. ++ fn try_as(self) -> Result ++ where Self: Sized + TryInto { ++ self.try_into() ++ } ++ ++ /// Attempt a value conversion of the subject to a given type. ++ fn value_as(self) -> Result ++ where Self: Sized + ValueInto { ++ self.value_into() ++ } ++} ++ ++impl ConvUtil for T {} ++ ++/** ++This extension trait exists to simplify using various conversions. ++ ++If there is more than one `ApproxFrom` implementation for a given type, a simple call to `approx_into` may not be uniquely resolvable. Due to the position of the scheme parameter (on the trait itself), it is cumbersome to specify which scheme you wanted. ++ ++The destination type is inferred from context. ++ ++See also the [`ConvUtil`](./trait.ConvUtil.html) trait. ++ ++> **Note**: There appears to be a bug in `rustdoc`'s output. This trait is implemented *for all* types, though the methods are only available for types where the appropriate conversions are defined. ++*/ ++pub trait ConvAsUtil { ++ /// Approximate the subject with the default scheme. ++ fn approx(self) -> Result ++ where Self: Sized + ApproxInto { ++ self.approx_into() ++ } ++ ++ /// Approximate the subject with a specific scheme. ++ fn approx_by(self) -> Result ++ where ++ Self: Sized + ApproxInto, ++ Scheme: ApproxScheme, ++ { ++ self.approx_into() ++ } ++} ++ ++impl ConvAsUtil for T {} diff --cc vendor/conv-0.3.3/src/macros.rs index 000000000,000000000..f0609ecb1 new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/src/macros.rs @@@ -1,0 -1,0 +1,148 @@@ ++/*! ++This module provides convenience macros to help with implementing the conversion traits. ++ ++# `TryFrom!` ++ ++```ignore ++macro_rules! TryFrom { ++ (($target:ty) $enum:item) => { ... }; ++} ++``` ++ ++This macro attempts to derive an implementation of the [`TryFrom`](../trait.TryFrom.html) trait. 
Specifically, it supports `enum`s consisting entirely of unitary variants, with or without explicit values. The source type can be any integer type which the variants of the enumeration can be explicitly cast to (*i.e.* using `as`). ++ ++If a conversion fails (due to there being no matching variant for the specified integer value `src`), then the conversion returns `Err(Unrepresentable(src))` (see [`Unrepresentable`](../errors/struct.Unrepresentable.html)). ++ ++It is compatible with the [`custom_derive!`](https://crates.io/crates/custom_derive) macro. ++ ++## Example ++ ++Using `custom_derive!`: ++ ++``` ++#[macro_use] extern crate conv; ++#[macro_use] extern crate custom_derive; ++ ++custom_derive! { ++ #[derive(Debug, PartialEq, TryFrom(i32))] ++ enum Colours { ++ Red = 0, ++ Green = 5, ++ Blue ++ } ++} ++ ++fn main() { ++ use conv::{TryFrom, Unrepresentable}; ++ ++ assert_eq!(Colours::try_from(0), Ok(Colours::Red)); ++ assert_eq!(Colours::try_from(1), Err(Unrepresentable(1))); ++ assert_eq!(Colours::try_from(5), Ok(Colours::Green)); ++ assert_eq!(Colours::try_from(6), Ok(Colours::Blue)); ++ assert_eq!(Colours::try_from(7), Err(Unrepresentable(7))); ++} ++``` ++ ++The above is equivalent to the following: ++ ++``` ++#[macro_use] extern crate conv; ++ ++#[derive(Debug, PartialEq)] ++enum Colours { ++ Red = 0, ++ Green = 5, ++ Blue ++} ++ ++TryFrom! { (i32) enum Colours { ++ Red = 0, ++ Green = 5, ++ Blue ++} } ++# fn main() {} ++``` ++*/ ++ ++/** ++See the documentation for the [`macros`](./macros/index.html#tryfrom!) module for details. ++*/ ++#[macro_export] ++macro_rules! TryFrom { ++ (($prim:ty) $(pub)* enum $name:ident { $($body:tt)* }) => { ++ TryFrom! 
{ ++ @collect_variants ($name, $prim), ++ ($($body)*,) -> () ++ } ++ }; ++ ++ ( ++ @collect_variants ($name:ident, $prim:ty), ++ ($(,)*) -> ($($var_names:ident,)*) ++ ) => { ++ impl $crate::TryFrom<$prim> for $name { ++ type Err = $crate::errors::Unrepresentable<$prim>; ++ fn try_from(src: $prim) -> Result<$name, Self::Err> { ++ $( ++ if src == $name::$var_names as $prim { ++ return Ok($name::$var_names); ++ } ++ )* ++ Err($crate::errors::Unrepresentable(src)) ++ } ++ } ++ }; ++ ++ ( ++ @collect_variants $fixed:tt, ++ (#[$_attr:meta] $($tail:tt)*) -> $var_names:tt ++ ) => { ++ TryFrom! { ++ @skip_meta $fixed, ++ ($($tail)*) -> $var_names ++ } ++ }; ++ ++ ( ++ @collect_variants $fixed:tt, ++ ($var:ident $(= $_val:expr)*, $($tail:tt)*) -> ($($var_names:tt)*) ++ ) => { ++ TryFrom! { ++ @collect_variants $fixed, ++ ($($tail)*) -> ($($var_names)* $var,) ++ } ++ }; ++ ++ ( ++ @collect_variants ($name:ident), ++ ($var:ident $_struct:tt, $($tail:tt)*) -> ($($var_names:tt)*) ++ ) => { ++ const _error: () = concat!( ++ "cannot derive TryFrom for ", ++ stringify!($name), ++ ", due to non-unitary variant ", ++ stringify!($var), ++ "." ++ ); ++ }; ++ ++ ( ++ @skip_meta $fixed:tt, ++ (#[$_attr:meta] $($tail:tt)*) -> $var_names:tt ++ ) => { ++ TryFrom! { ++ @skip_meta $fixed, ++ ($($tail)*) -> $var_names ++ } ++ }; ++ ++ ( ++ @skip_meta $fixed:tt, ++ ($var:ident $($tail:tt)*) -> $var_names:tt ++ ) => { ++ TryFrom! { ++ @collect_variants $fixed, ++ ($var $($tail)*) -> $var_names ++ } ++ }; ++} diff --cc vendor/conv-0.3.3/src/misc.rs index 000000000,000000000..db80a532c new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/src/misc.rs @@@ -1,0 -1,0 +1,71 @@@ ++/*! ++This module defines some additional traits not *directly* tied to conversions. ++*/ ++ ++/** ++This trait indicates that values of a type can be logically "saturated". ++ ++This is used by the `errors::UnwrapOrSaturate` extension trait. 
++*/ ++pub trait Saturated { ++ /// Returns the type's saturated, maximum value. ++ fn saturated_max() -> Self; ++ ++ /// Returns the type's saturated, minimum value. ++ fn saturated_min() -> Self; ++} ++ ++item_for_each! { ++ (i8), (i16), (i32), (i64), (u8), (u16), (u32), (u64), (isize), (usize) => { ++ ($ity:ident) => { ++ impl Saturated for $ity { ++ #[inline] fn saturated_max() -> Self { ::std::$ity::MAX } ++ #[inline] fn saturated_min() -> Self { ::std::$ity::MIN } ++ } ++ }; ++ } ++} ++ ++/** ++This trait indicates that a type has an "invalid" sentinel value. ++ ++This is used by the `errors::UnwrapOrInvalid` extension trait. ++*/ ++pub trait InvalidSentinel { ++ /// Returns the type's "invalid" sentinel value. ++ fn invalid_sentinel() -> Self; ++} ++ ++item_for_each! { ++ (f32), (f64) => { ++ ($ity:ident) => { ++ impl InvalidSentinel for $ity { ++ #[inline] fn invalid_sentinel() -> Self { ::std::$ity::NAN } ++ } ++ }; ++ } ++} ++ ++/** ++This trait indicates that a type has positive and negative "infinity" values. ++ ++This is used by the `errors::UnwrapOrInf` extension trait. ++*/ ++pub trait SignedInfinity { ++ /// Returns the type's positive infinity value. ++ fn neg_infinity() -> Self; ++ ++ /// Returns the type's negative infinity value. ++ fn pos_infinity() -> Self; ++} ++ ++item_for_each! 
{ ++ (f32), (f64) => { ++ ($ity:ident) => { ++ impl SignedInfinity for $ity { ++ #[inline] fn neg_infinity() -> Self { ::std::$ity::NEG_INFINITY } ++ #[inline] fn pos_infinity() -> Self { ::std::$ity::INFINITY } ++ } ++ }; ++ } ++} diff --cc vendor/conv-0.3.3/tests/conv_utils.rs index 000000000,000000000..3444ab318 new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/tests/conv_utils.rs @@@ -1,0 -1,0 +1,40 @@@ ++#[macro_use] extern crate conv; ++ ++use conv::prelude::*; ++ ++#[test] ++fn test_approx() { ++ use conv::DefaultApprox; ++ assert_eq!((1.5f32).approx(), Ok(1i32)); ++ assert_eq!((1.5f32).approx_by::(), Ok(1)); ++ assert_eq!((1.5f32).approx_as::(), Ok(1)); ++ assert_eq!((1.5f32).approx_as_by::(), Ok(1)); ++} ++ ++#[test] ++fn test_into() { ++ let v = "ABC".into_as::>(); ++ assert_eq!(&*v, &[0x41, 0x42, 0x43]); ++} ++ ++#[test] ++fn test_try() { ++ #[derive(PartialEq, Debug)] enum ItAintRight { BabeNo, NoNo } ++ TryFrom! { (u8) enum ItAintRight { BabeNo, NoNo } } ++ ++ assert_eq!(0u8.try_as::(), Ok(ItAintRight::BabeNo)); ++ assert_eq!(1u8.try_as::(), Ok(ItAintRight::NoNo)); ++ assert_eq!(2u8.try_as::(), Err(conv::Unrepresentable(2))); ++} ++ ++#[test] ++fn test_value() { ++ assert_eq!((123u32).value_as::(), Ok(123)); ++} ++ ++#[test] ++fn test_whizzo() { ++ use conv::errors::Unrepresentable; ++ assert_eq!((-1.0f32).approx_as::().saturate(), Ok::<_, Unrepresentable<_>>(0u8)); ++ assert_eq!((-1i32).value_as::().saturate().unwrap_ok(), 0u8); ++} diff --cc vendor/conv-0.3.3/tests/derive_try_from.rs index 000000000,000000000..f8e0c781f new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/tests/derive_try_from.rs @@@ -1,0 -1,0 +1,45 @@@ ++#[macro_use] extern crate conv; ++ ++use conv::{TryFrom, Unrepresentable}; ++ ++#[derive(Debug, PartialEq)] ++enum Get { Up, Down, AllAround } ++ ++TryFrom! 
{ (u8) ++ enum Get { ++ Up, ++ /// And ++ Down, ++ /** And */ ++ AllAround ++ } ++} ++ ++#[derive(Debug, PartialEq)] ++enum GottaGo { GetAway, Fast = 9000, Faster = 9001 } ++ ++TryFrom! { (u16) ++ enum GottaGo { ++ GetAway, ++ Fast = 9000, ++ /// This show was stupid. ++ Faster = 9001 ++ } ++} ++ ++#[test] ++fn test_try_from() { ++ assert_eq!(Get::try_from(0u8), Ok(Get::Up)); ++ assert_eq!(Get::try_from(1u8), Ok(Get::Down)); ++ assert_eq!(Get::try_from(2u8), Ok(Get::AllAround)); ++ assert_eq!(Get::try_from(3u8), Err(Unrepresentable(3u8))); ++ ++ assert_eq!(GottaGo::try_from(0u16), Ok(GottaGo::GetAway)); ++ assert_eq!(GottaGo::try_from(1u16), Err(Unrepresentable(1u16))); ++ assert_eq!(GottaGo::try_from(2u16), Err(Unrepresentable(2u16))); ++ assert_eq!(GottaGo::try_from(3u16), Err(Unrepresentable(3u16))); ++ assert_eq!(GottaGo::try_from(8999u16), Err(Unrepresentable(8999u16))); ++ assert_eq!(GottaGo::try_from(9000u16), Ok(GottaGo::Fast)); ++ assert_eq!(GottaGo::try_from(9001u16), Ok(GottaGo::Faster)); ++ assert_eq!(GottaGo::try_from(9002u16), Err(Unrepresentable(9002u16))); ++} diff --cc vendor/conv-0.3.3/tests/lang_char.rs index 000000000,000000000..88a932074 new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/tests/lang_char.rs @@@ -1,0 -1,0 +1,121 @@@ ++extern crate conv; ++ ++#[macro_use] mod util; ++ ++use conv::*; ++ ++use conv::PosOverflow as Of; ++use conv::Unrepresentable as Ur; ++ ++macro_rules! 
check { ++ (@ $from:ty, $to:ty=> $(;)*) => {}; ++ ++ (@ $from:ty, $to:ty=> try cident; $($tail:tt)*) => { ++ check!(@ $from, $to=> try v: '\x00';); ++ check!(@ $from, $to=> try v: '\x01';); ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> try uident; $($tail:tt)*) => { ++ check!(@ $from, $to=> try v: 0;); ++ check!(@ $from, $to=> try v: 1;); ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> try v: $src:expr, !$dst:expr; $($tail:tt)*) => { ++ { ++ let src: $from = $src; ++ let dst: Result<$to, _> = src.try_into(); ++ assert_eq!(dst, Err($dst(src))); ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> try v: $src:expr; $($tail:tt)*) => { ++ { ++ let src: $from = $src; ++ let dst: Result<$to, _> = src.try_into(); ++ assert_eq!(dst, Ok($src as $to)); ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qt: *; $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, _> = v.try_into(); ++ dst == Ok(v as $to) ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qv {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ ($from:ty, $to:ty=> $($tail:tt)*) => { ++ check! { @ $from, $to=> $($tail)*; } ++ }; ++} ++ ++#[test] ++fn test_i_to_c() { ++ check!(u8, char => try uident; qt: *); ++ ++ /* ++ `char` is a pain because `u8` is the *only* type you can cast directly from. So, the `check!` macro is *basically useless*. ++ ++ Also, `char` has a great big hole in the middle, which makes things more interesting. ++ ++ Instead, we're just going to make sure that the conversions *exist* and have the expected error type. ++ */ ++ macro_rules! 
check_i_to_c { ++ ($($ts:ty),* $(,)*) => { ++ $( ++ { ++ let v: $ts = 0; ++ let r: Result> = TryFrom::try_from(v); ++ assert_eq!(r, Ok('\x00')); ++ } ++ )* ++ }; ++ } ++ check_i_to_c!(i8, i16, i32, i64, isize, u16, u32, u64, usize); ++} ++ ++#[test] ++fn test_c_to_i() { ++ check!(char, i8=> try cident; ++ try v: '\u{80}', !Of; ++ ); ++ check!(char, i16=> try cident; ++ try v: '\u{8000}', !Of; ++ ); ++ check!(char, i32=> try cident;); ++ check!(char, i64=> try cident;); ++ check!(char, u8=> try cident; ++ try v: '\u{100}', !Of; ++ ); ++ check!(char, u16=> try cident; ++ try v: '\u{10000}', !Of; ++ ); ++ check!(char, u32=> try cident;); ++ check!(char, u64=> try cident;); ++ for_bitness! { ++ 32 { ++ check!(char, isize=> try cident; ++ try v: '\u{10ffff}'; ++ ); ++ check!(char, usize=> try cident;); ++ } ++ 64 { ++ check!(char, i64=> try cident;); ++ check!(char, u64=> try cident;); ++ } ++ } ++} diff --cc vendor/conv-0.3.3/tests/lang_floats.rs index 000000000,000000000..9ec1a416d new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/tests/lang_floats.rs @@@ -1,0 -1,0 +1,57 @@@ ++extern crate conv; ++ ++#[macro_use] mod util; ++ ++use conv::*; ++ ++use conv::FloatError::NegOverflow as FU; ++use conv::FloatError::PosOverflow as FO; ++ ++#[test] ++fn test_f32() { ++ check!(f32, f32=> fident; qv: *;); ++ check!(f32, f64=> fident; qv: *;); ++} ++ ++#[test] ++fn test_f32_to_int() { ++ check!(f32, i8=> sidenta; qa: i8=> a: -129.0, !FU; a: 128.0, !FO;); ++ check!(f32, i16=> sidenta; qa: i16=> a: -32_769.0, !FU; a: 32_768.0, !FO;); ++ check!(f32, i32=> sidenta; qa: i32=> ++ a: -2.1474836e9, -2147483648; a: 2.1474835e9, 2147483520; ++ a: -2_147_500_000.0, !FU; a: 2_147_500_000.0, !FO;); ++ check!(f32, i64=> sidenta; qa: i64=> ++ a: -9.223372e18, -9223372036854775808; a: 9.2233715e18, 9223371487098961920; ++ a: -9_223_373_000_000_000_000.0, !FU; a: 9_223_373_000_000_000_000.0, !FO;); ++ check!(f32, u8=> uidenta; qa: u8=> a: -1.0, !FU; a: 256.0, !FO;); ++ check!(f32, u16=> 
uidenta; qa: u16=> a: -1.0, !FU; a: 65_536.0, !FO;); ++ check!(f32, u32=> uidenta; qa: u32=> ++ a: 4.294967e9, 4294967040; ++ a: -1.0, !FU; a: 4_294_968_000.0, !FO;); ++ check!(f32, u64=> uidenta; qa: u64=> ++ a: 1.8446743e19, 18446742974197923840; ++ a: -1.0, !FU; a: 18_446_746_000_000_000_000.0, !FO;); ++} ++ ++#[test] ++fn test_f64_to_int() { ++ check!(f64, i8=> sidenta; qa: i8=> a: -129.0, !FU; a: 128.0, !FO;); ++ check!(f64, i16=> sidenta; qa: i16=> a: -32_769.0, !FU; a: 32_768.0, !FO;); ++ check!(f64, i32=> sidenta; qa: i32=> a: -2_147_483_649.0, !FU; a: 2_147_483_648.0, !FO;); ++ check!(f64, i64=> sidenta; qa: i64=> ++ a: -9.223372036854776e18, -9223372036854775808; ++ a: 9.223372036854775e18, 9223372036854774784; ++ a: -9_223_372_036_854_778_000.0, !FU; a: 9_223_372_036_854_778_000.0, !FO;); ++ check!(f64, u8=> uidenta; qa: u8=> a: -1.0, !FU; a: 256.0, !FO;); ++ check!(f64, u16=> uidenta; qa: u16=> a: -1.0, !FU; a: 65_536.0, !FO;); ++ check!(f64, u32=> uidenta; qa: u32=> a: -1.0, !FU; a: 4_294_967_296.0, !FO;); ++ check!(f64, u64=> uidenta; qa: u64=> ++ a: 1.844674407370955e19; ++ a: -1.0, !FU; a: 18_446_744_073_709_560_000.0, !FO;); ++} ++ ++#[test] ++fn test_f64() { ++ check!(f64, f32=> fidenta; qa: *;); ++ check!(f64, f64=> fident; qv: *;); ++} diff --cc vendor/conv-0.3.3/tests/lang_ints.rs index 000000000,000000000..f8f63a7ca new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/tests/lang_ints.rs @@@ -1,0 -1,0 +1,395 @@@ ++extern crate conv; ++ ++#[macro_use] mod util; ++ ++use conv::*; ++ ++use conv::NegOverflow as Uf; ++use conv::PosOverflow as Of; ++use conv::RangeError::NegOverflow as RU; ++use conv::RangeError::PosOverflow as RO; ++ ++#[test] ++fn test_i8() { ++ check!(i8, i8=> sident; qv: *; qa: *; qaW: *); ++ check!(i8, i16=> sident; qv: *; qa: *; qaW: *); ++ check!(i8, i32=> sident; qv: *; qa: *; qaW: *); ++ check!(i8, i64=> sident; qv: *; qa: *; qaW: *); ++ check!(i8, u8=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ check!(i8, 
u16=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ check!(i8, u32=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ check!(i8, u64=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ check!(i8, isize=> sident; qv: *; qa: *; qaW: *); ++ check!(i8, usize=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++} ++ ++#[test] ++fn test_i16() { ++ check!(i16, i8=> sident; qv: i8=> qa: i8=> qaW: *; ++ v: -129, !RU; v: 128, !RO; ++ ); ++ check!(i16, i16=> sident; qv: *; qa: *; qaW: *); ++ check!(i16, i32=> sident; qv: *; qa: *; qaW: *); ++ check!(i16, i64=> sident; qv: *; qa: *; qaW: *); ++ check!(i16, u8=> uident; qv: u8=> qa: +; qaW: *; ++ v: -1, !RU; ++ ); ++ check!(i16, u16=> uident; qv: u16, i16=> qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ check!(i16, u32=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ check!(i16, u64=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ check!(i16, isize=> sident; qv: *; qa: *; qaW: *); ++ check!(i16, usize=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++} ++ ++#[test] ++fn test_i32() { ++ check!(i32, i8=> sident; qv: i8=> qa: i8=> qaW: *; ++ v: -129, !RU; v: 128, !RO; ++ ); ++ check!(i32, i16=> sident; qv: i16=> qa: i16=> qaW: *; ++ v: -32_769, !RU; v: 32_768, !RO; ++ ); ++ check!(i32, i32=> sident; qv: *; qa: *; qaW: *); ++ check!(i32, i64=> sident; qv: *; qa: *; qaW: *); ++ check!(i32, u8=> uident; qv: u8=> qa: u8=> qaW: *; ++ v: -1, !RU; ++ ); ++ check!(i32, u16=> uident; qv: u16=> qa: u16=> qaW: *; ++ v: -1, !RU; ++ ); ++ check!(i32, u32=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ check!(i32, u64=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ for_bitness! 
{ ++ 32 { ++ check!(i32, isize=> sident; qv: *; qa: *; qaW: *); ++ check!(i32, usize=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ } ++ 64 { ++ check!(i32, isize=> sident; qv: *; qa: *; qaW: *); ++ check!(i32, usize=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ } ++ } ++} ++ ++#[test] ++fn test_i64() { ++ check!(i64, i8=> sident; qv: i8=> qa: i8=> qaW: *; ++ v: -129, !RU; v: 128, !RO; ++ ); ++ check!(i64, i16=> sident; qv: i16=> qa: i16=> qaW: *; ++ v: -32_769, !RU; v: 32_768, !RO; ++ ); ++ check!(i64, i32=> sident; qv: i32=> qa: i32=> qaW: *; ++ v: -2_147_483_649, !RU; v: 2_147_483_648, !RO; ++ ); ++ check!(i64, i64=> sident; qv: *; qa: *; qaW: *; ++ ); ++ check!(i64, u8=> uident; qv: u8=> qa: u8=> qaW: *; ++ v: -1, !RU; ++ ); ++ check!(i64, u16=> uident; qv: u16=> qa: u16=> qaW: *; ++ v: -1, !RU; ++ ); ++ check!(i64, u32=> uident; qv: u32=> qa: u32=> qaW: *; ++ v: -1, !RU; ++ ); ++ check!(i64, u64=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ for_bitness! 
{ ++ 32 { ++ check!(i64, isize=> sident; qv: isize=> qa: isize=> qaW: *; ++ v: -2_147_483_649, !RU; v: 2_147_483_648, !RO; ++ ); ++ check!(i64, usize=> uident; qv: usize=> qa: usize=> qaW: *; ++ v: -1, !RU; v: 4_294_967_296, !RO; ++ ); ++ } ++ 64 { ++ check!(i64, isize=> sident; qv: *; qa: *; qaW: *; ++ ); ++ check!(i64, usize=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ } ++ } ++} ++ ++#[test] ++fn test_u8() { ++ check!(u8, i8=> uident; qv: +i8=> qa: +i8=> qaW: *; ++ v: 127; v: 128, !Of; ++ ); ++ check!(u8, i16=> uident; qv: *; qa: *; qaW: *); ++ check!(u8, i32=> uident; qv: *; qa: *; qaW: *); ++ check!(u8, i64=> uident; qv: *; qa: *; qaW: *); ++ check!(u8, u8=> uident; qv: *; qa: *; qaW: *); ++ check!(u8, u16=> uident; qv: *; qa: *; qaW: *); ++ check!(u8, u32=> uident; qv: *; qa: *; qaW: *); ++ check!(u8, u64=> uident; qv: *; qa: *; qaW: *); ++ check!(u8, isize=> uident; qv: *; qa: *; qaW: *); ++ check!(u8, usize=> uident; qv: *; qa: *; qaW: *); ++} ++ ++#[test] ++fn test_u16() { ++ check!(u16, i8=> uident; qv: +i8=> qa: +i8=> qaW: *; ++ v: 128, !Of; ++ ); ++ check!(u16, i16=> uident; qv: +i16=> qa: +i16=> qaW: *; ++ v: 32_768, !Of; ++ ); ++ check!(u16, i32=> uident; qv: *; qa: *; qaW: *); ++ check!(u16, i64=> uident; qv: *; qa: *; qaW: *); ++ check!(u16, u8=> uident; qv: u8=> qa: u8=> qaW: *; ++ v: 256, !Of; ++ ); ++ check!(u16, u16=> uident; qv: *; qa: *; qaW: *); ++ check!(u16, u32=> uident; qv: *; qa: *; qaW: *); ++ check!(u16, u64=> uident; qv: *; qa: *; qaW: *); ++ check!(u16, isize=> uident; qv: *; qa: *; qaW: *); ++ check!(u16, usize=> uident; qv: *; qa: *; qaW: *); ++} ++ ++#[test] ++fn test_u32() { ++ check!(u32, i8=> uident; qv: +i8=> qa: +i8=> qaW: *; ++ v: 128, !Of; ++ ); ++ check!(u32, i16=> uident; qv: +i16=> qa: +i16=> qaW: *; ++ v: 32_768, !Of; ++ ); ++ check!(u32, i32=> uident; qv: +i32=> qa: +i32=> qaW: *; ++ v: 2_147_483_648, !Of; ++ ); ++ check!(u32, i64=> uident; qv: *; qa: *; qaW: *); ++ check!(u32, u8=> uident; qv: u8=> qa: u8=> 
qaW: *; ++ v: 256, !Of; ++ ); ++ check!(u32, u16=> uident; qv: u16=> qa: u16=> qaW: *; ++ v: 65_536, !Of; ++ ); ++ check!(u32, u32=> uident; qv: *; qa: *; qaW: *); ++ check!(u32, u64=> uident; qv: *; qa: *; qaW: *); ++ for_bitness! { ++ 32 { ++ check!(u32, isize=> uident; qv: +isize=> qa: +isize=> qaW: *; ++ v: 2_147_483_647; v: 2_147_483_648, !Of; ++ ); ++ check!(u32, usize=> uident; qv: *; qa: *; qaW: *); ++ } ++ 64 { ++ check!(u32, isize=> uident; qv: *; qa: *; qaW: *); ++ check!(u32, usize=> uident; qv: *; qa: *; qaW: *); ++ } ++ } ++} ++ ++#[test] ++fn test_u64() { ++ check!(u64, i8=> uident; qv: +i8=> qa: +i8=> qaW: *; ++ v: 128, !Of; ++ ); ++ check!(u64, i16=> uident; qv: +i16=> qa: +i16=> qaW: *; ++ v: 32_768, !Of; ++ ); ++ check!(u64, i32=> uident; qv: +i32=> qa: +i32=> qaW: *; ++ v: 2_147_483_648, !Of; ++ ); ++ check!(u64, i64=> uident; qv: +i64=> qa: +i64=> qaW: *; ++ v: 9_223_372_036_854_775_808, !Of; ++ ); ++ check!(u64, u8=> uident; qv: u8=> qa: u8=> qaW: *; ++ v: 256, !Of; ++ ); ++ check!(u64, u16=> uident; qv: u16=> qa: u16=> qaW: *; ++ v: 65_536, !Of; ++ ); ++ check!(u64, u32=> uident; qv: u32=> qa: u32=> qaW: *; ++ v: 4_294_967_296, !Of; ++ ); ++ check!(u64, u64=> uident; qv: *; qa: *; qaW: *); ++ for_bitness! 
{ ++ 32 { ++ check!(u64, isize=> uident; qv: +isize=> qa: +isize=> qaW: *; ++ v: 2_147_483_648, !Of; ++ ); ++ check!(u64, usize=> uident; qv: usize=> qa: usize=> qaW: *; ++ v: 4_294_967_296, !Of; ++ ); ++ } ++ 64 { ++ check!(u64, isize=> uident; qv: +i64=> qa: +i64=> qaW: *; ++ v: 9_223_372_036_854_775_808, !Of; ++ ); ++ check!(u64, usize=> uident; qv: *; qa: *; qaW: *); ++ } ++ } ++} ++ ++#[test] ++fn test_isize() { ++ check!(isize, i8=> sident; qv: i8=> qa: i8=> qaW: *; ++ v: -129, !RU; v: 128, !RO; ++ ); ++ check!(isize, i16=> sident; qv: i16=> qa: i16=> qaW: *; ++ v: -32_769, !RU; v: 32_768, !RO; ++ ); ++ check!(isize, u8=> uident; qv: u8=> qa: u8=> qaW: *; ++ v: -1, !RU; v: 256, !RO; ++ ); ++ check!(isize, u16=> uident; qv: u16=> qa: u16=> qaW: *; ++ v: -1, !RU; v: 65_536, !RO; ++ ); ++ check!(isize, isize=> sident; qv: *; qa: *; qaW: *); ++ for_bitness! { ++ 32 { ++ check!(isize, i32=> sident; qv: *; qa: *; qaW: *); ++ check!(isize, i64=> sident; qv: *; qa: *; qaW: *); ++ check!(isize, u32=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ check!(isize, u64=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ check!(isize, usize=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ } ++ 64 { ++ check!(isize, i32=> sident; qv: *; qa: *; qaW: *); ++ check!(isize, i64=> sident; qv: *; qa: *; qaW: *); ++ check!(isize, u32=> uident; qv: u32=> qa: u32=> qaW: *; ++ v: -1, !RU; v: 4_294_967_296, !RO; ++ ); ++ check!(isize, u64=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ check!(isize, usize=> uident; qv: +; qa: +; qaW: *; ++ v: -1, !Uf; ++ ); ++ } ++ } ++} ++ ++#[test] ++fn test_usize() { ++ check!(usize, i8=> uident; qv: +i8=> qa: +i8=> qaW: *; ++ v: 128, !Of; ++ ); ++ check!(usize, i16=> uident; qv: +i16=> qa: +i16=> qaW: *; ++ v: 32_768, !Of; ++ ); ++ check!(usize, u8=> uident; qv: u8=> qa: u8=> qaW: *; ++ v: 256, !Of; ++ ); ++ check!(usize, u16=> uident; qv: u16=> qa: u16=> qaW: *; ++ v: 65_536, !Of; ++ ); ++ check!(usize, usize=> uident; qv: 
*; qa: *; qaW: *); ++ for_bitness! { ++ 32 { ++ check!(usize, i32=> uident; qv: +i32=> qa: +i32=> qaW: *); ++ check!(usize, i64=> uident; qv: *; qa: *; qaW: *); ++ check!(usize, u32=> uident; qv: *; qa: *; qaW: *); ++ check!(usize, u64=> uident; qv: *; qa: *; qaW: *); ++ check!(usize, isize=> uident; qv: +isize=> qa: +isize=> qaW: *); ++ } ++ 64 { ++ check!(usize, i32=> uident; qv: +i32=> qa: +i32=> qaW: *); ++ check!(usize, i64=> uident; qv: +i64=> qa: +i64=> qaW: *); ++ check!(usize, u32=> uident; qv: u32=> qa: u32=> qaW: *; ++ v: 4_294_967_296, !Of; ++ ); ++ check!(usize, u64=> uident; qv: *; qa: *; qaW: *); ++ check!(usize, isize=> uident; qv: +isize=> qa: +isize=> qaW: *); ++ } ++ } ++} ++ ++#[test] ++fn test_i_to_f() { ++ check!(i8, f32=> sident; qv: *; qa: *); ++ check!(i16, f32=> sident; qv: *; qa: *); ++ check!(i32, f32=> sident; qv: (+-16_777_216); qa: *; ++ v: -16_777_217, !RU; v: 16_777_217, !RO; ++ ); ++ check!(i64, f32=> sident; qv: (+-16_777_216); qa: *; ++ v: -16_777_217, !RU; v: 16_777_217, !RO; ++ ); ++ check!(isize, f32=> sident; qv: (+-16_777_216); qa: *; ++ v: -16_777_217, !RU; v: 16_777_217, !RO; ++ ); ++ ++ check!(u8, f32=> uident; qv: *; qa: *); ++ check!(u16, f32=> uident; qv: *; qa: *); ++ check!(u32, f32=> uident; qv: (, 16_777_216); qa: *; ++ v: 16_777_217, !Of; ++ ); ++ check!(u64, f32=> uident; qv: (, 16_777_216); qa: *; ++ v: 16_777_217, !Of; ++ ); ++ check!(usize, f32=> uident; qv: (, 16_777_216); qa: *; ++ v: 16_777_217, !Of; ++ ); ++ ++ check!(i8, f64=> sident; qv: *; qa: *); ++ check!(i16, f64=> sident; qv: *; qa: *); ++ check!(i32, f64=> sident; qv: *; qa: *); ++ check!(i64, f64=> sident; qv: (+-9_007_199_254_740_992); qa: *; ++ v: -9_007_199_254_740_993, !RU; v: 9_007_199_254_740_993, !RO; ++ ); ++ for_bitness! 
{ ++ 32 { ++ check!(isize, f64=> sident; qv: *; qa: *); ++ } ++ 64 { ++ check!(i64, f64=> sident; qv: (+-9_007_199_254_740_992); qa: *; ++ v: -9_007_199_254_740_993, !RU; v: 9_007_199_254_740_993, !RO; ++ ); ++ } ++ } ++ ++ check!(u8, f64=> uident; qv: *; qa: *); ++ check!(u16, f64=> uident; qv: *; qa: *); ++ check!(u32, f64=> uident; qv: *; qa: *); ++ check!(u64, f64=> uident; qv: (, 9_007_199_254_740_992); qa: *; ++ v: 9_007_199_254_740_993, !Of; ++ ); ++ for_bitness! { ++ 32 { ++ check!(usize, f64=> uident; qv: *; qa: *); ++ } ++ 64 { ++ check!(u64, f64=> uident; qv: (, 9_007_199_254_740_992); qa: *; ++ v: 9_007_199_254_740_993, !Of; ++ ); ++ } ++ } ++} diff --cc vendor/conv-0.3.3/tests/unwraps.rs index 000000000,000000000..921dec7c4 new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/tests/unwraps.rs @@@ -1,0 -1,0 +1,31 @@@ ++extern crate conv; ++ ++#[macro_use] mod util; ++ ++use conv::*; ++ ++macro_rules! cty { ++ ($e:expr, $t:ty) => { ++ { let v: $t = $e; v } ++ }; ++} ++ ++#[test] ++fn test_unwraps() { ++ assert_eq!(cty!(0i16.value_into().unwrap(), i32), 0); ++ assert_eq!(cty!(127i16.value_into().unwrap(), i8), 127); ++ assert_eq!(cty!(128i16.value_into().unwrap_or_saturate(), i8), 127); ++ assert_eq!(cty!(128i16.approx().unwrap_or_saturate(), i8), 127); ++ assert_eq!(cty!(128i16.approx_by::().unwrap_or_saturate(), i8), -128); ++ ++ assert_eq!(cty!(16_777_216i32.value_into().unwrap(), f32), 16_777_216.0); ++ assert_eq!(cty!(16_777_216i32.value_into().unwrap_or_inf(), f32), 16_777_216.0); ++ assert_eq!(cty!(16_777_217i32.value_into().unwrap_or_inf(), f32), std::f32::INFINITY); ++ assert_eq!(cty!((-16_777_217i32).value_into().unwrap_or_inf(), f32), std::f32::NEG_INFINITY); ++ ++ assert_eq!(cty!(16_777_216i32.value_into().unwrap_or_invalid(), f32), 16_777_216.0); ++ assert!(cty!(16_777_217i32.value_into().unwrap_or_invalid(), f32).is_nan()); ++ assert!(cty!((-16_777_217i32).value_into().unwrap_or_invalid(), f32).is_nan()); ++ ++ 
assert_eq!(cty!(0u8.value_into().unwrap_ok(), u16), 0); ++} diff --cc vendor/conv-0.3.3/tests/use_in_generics.rs index 000000000,000000000..9400dacc5 new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/tests/use_in_generics.rs @@@ -1,0 -1,0 +1,14 @@@ ++//! Are conversions easily usable in generic code? ++extern crate conv; ++ ++use conv::prelude::*; ++ ++#[test] ++fn test_generic_unwrap() { ++ fn do_conv(t: T) -> U ++ where T: ValueInto { ++ t.value_into().unwrap() ++ } ++ ++ assert_eq!({let x: u8 = do_conv(42i32); x}, 42u8); ++} diff --cc vendor/conv-0.3.3/tests/util/mod.rs index 000000000,000000000..9bb5abfc5 new file mode 100644 --- /dev/null +++ b/vendor/conv-0.3.3/tests/util/mod.rs @@@ -1,0 -1,0 +1,509 @@@ ++macro_rules! SL { ++ ($($tts:tt)*) => { stringify!($($tts)*) }; ++} ++ ++macro_rules! as_expr { ++ ($e:expr) => {$e}; ++} ++ ++macro_rules! check { ++ (@ $from:ty, $to:ty=> $(;)*) => {}; ++ ++ (@ $from:ty, $to:ty=> cident; $($tail:tt)*) => { ++ check!(@ $from, $to=> v: '\x00';); ++ check!(@ $from, $to=> v: '\x01';); ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> uident; $($tail:tt)*) => { ++ check!(@ $from, $to=> v: 0;); ++ check!(@ $from, $to=> v: 1;); ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> sident; $($tail:tt)*) => { ++ check!(@ $from, $to=> v: -1;); ++ check!(@ $from, $to=> v: 0;); ++ check!(@ $from, $to=> v: 1;); ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> fident; $($tail:tt)*) => { ++ check!(@ $from, $to=> v: -1.0;); ++ check!(@ $from, $to=> v: 0.0;); ++ check!(@ $from, $to=> v: 1.0;); ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> uidenta; $($tail:tt)*) => { ++ check!(@ $from, $to=> a: 0.0;); ++ check!(@ $from, $to=> a: 1.0;); ++ ++ check!(@ $from, $to=> aRTN: 0.00, 0;); ++ check!(@ $from, $to=> aRTN: 0.25, 0;); ++ check!(@ $from, $to=> aRTN: 0.50, 1;); ++ check!(@ $from, $to=> aRTN: 0.75, 1;); ++ check!(@ $from, $to=> aRTN: 1.00, 1;); ++ 
++ check!(@ $from, $to=> aRNI: 0.00, 0;); ++ check!(@ $from, $to=> aRNI: 0.25, 0;); ++ check!(@ $from, $to=> aRNI: 0.50, 0;); ++ check!(@ $from, $to=> aRNI: 0.75, 0;); ++ check!(@ $from, $to=> aRNI: 1.00, 1;); ++ ++ check!(@ $from, $to=> aRPI: 0.00, 0;); ++ check!(@ $from, $to=> aRPI: 0.25, 1;); ++ check!(@ $from, $to=> aRPI: 0.50, 1;); ++ check!(@ $from, $to=> aRPI: 0.75, 1;); ++ check!(@ $from, $to=> aRPI: 1.00, 1;); ++ ++ check!(@ $from, $to=> aRTZ: 0.00, 0;); ++ check!(@ $from, $to=> aRTZ: 0.25, 0;); ++ check!(@ $from, $to=> aRTZ: 0.50, 0;); ++ check!(@ $from, $to=> aRTZ: 0.75, 0;); ++ check!(@ $from, $to=> aRTZ: 1.00, 1;); ++ ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> sidenta; $($tail:tt)*) => { ++ check!(@ $from, $to=> a: -1.0;); ++ check!(@ $from, $to=> a: 0.0;); ++ check!(@ $from, $to=> a: 1.0;); ++ ++ check!(@ $from, $to=> aRTN: -1.00, -1;); ++ check!(@ $from, $to=> aRTN: -0.75, -1;); ++ check!(@ $from, $to=> aRTN: -0.50, -1;); ++ check!(@ $from, $to=> aRTN: -0.25, 0;); ++ check!(@ $from, $to=> aRTN: 0.00, 0;); ++ check!(@ $from, $to=> aRTN: 0.25, 0;); ++ check!(@ $from, $to=> aRTN: 0.50, 1;); ++ check!(@ $from, $to=> aRTN: 0.75, 1;); ++ check!(@ $from, $to=> aRTN: 1.00, 1;); ++ ++ check!(@ $from, $to=> aRNI: -1.00, -1;); ++ check!(@ $from, $to=> aRNI: -0.75, -1;); ++ check!(@ $from, $to=> aRNI: -0.50, -1;); ++ check!(@ $from, $to=> aRNI: -0.25, -1;); ++ check!(@ $from, $to=> aRNI: 0.00, 0;); ++ check!(@ $from, $to=> aRNI: 0.25, 0;); ++ check!(@ $from, $to=> aRNI: 0.50, 0;); ++ check!(@ $from, $to=> aRNI: 0.75, 0;); ++ check!(@ $from, $to=> aRNI: 1.00, 1;); ++ ++ check!(@ $from, $to=> aRPI: -1.00, -1;); ++ check!(@ $from, $to=> aRPI: -0.75, 0;); ++ check!(@ $from, $to=> aRPI: -0.50, 0;); ++ check!(@ $from, $to=> aRPI: -0.25, 0;); ++ check!(@ $from, $to=> aRPI: 0.00, 0;); ++ check!(@ $from, $to=> aRPI: 0.25, 1;); ++ check!(@ $from, $to=> aRPI: 0.50, 1;); ++ check!(@ $from, $to=> aRPI: 0.75, 1;); ++ check!(@ $from, $to=> aRPI: 
1.00, 1;); ++ ++ check!(@ $from, $to=> aRTZ: -1.00, -1;); ++ check!(@ $from, $to=> aRTZ: -0.75, 0;); ++ check!(@ $from, $to=> aRTZ: -0.50, 0;); ++ check!(@ $from, $to=> aRTZ: -0.25, 0;); ++ check!(@ $from, $to=> aRTZ: 0.00, 0;); ++ check!(@ $from, $to=> aRTZ: 0.25, 0;); ++ check!(@ $from, $to=> aRTZ: 0.50, 0;); ++ check!(@ $from, $to=> aRTZ: 0.75, 0;); ++ check!(@ $from, $to=> aRTZ: 1.00, 1;); ++ ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> fidenta; $($tail:tt)*) => { ++ check!(@ $from, $to=> a: -1.0;); ++ check!(@ $from, $to=> a: 0.0;); ++ check!(@ $from, $to=> a: 1.0;); ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> v: $src:expr, !$dst:expr; $($tail:tt)*) => { ++ { ++ println!("? {} => {}, v: {}, !{}", SL!($from), SL!($to), SL!($src), SL!($dst)); ++ let src: $from = $src; ++ let dst: Result<$to, _> = src.value_into(); ++ assert_eq!(dst, Err($dst(src))); ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> v: $src:expr; $($tail:tt)*) => { ++ { ++ println!("? {} => {}, v: {}", SL!($from), SL!($to), SL!($src)); ++ let src: $from = $src; ++ let dst: Result<$to, _> = src.value_into(); ++ assert_eq!(dst, Ok($src as $to)); ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qv: *; $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ println!("? {} => {}, qv: *", SL!($from), SL!($to)); ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, _> = v.value_into(); ++ dst == Ok(v as $to) ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qv {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qv: (+-$bound:expr); $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ println!("? 
{} => {}, qv: (+- {})", SL!($from), SL!($to), SL!($bound)); ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, conv::FloatError<_>> = v.value_into().map_err(From::from); ++ if !(-$bound as $from <= v) { ++ dst == Err(conv::FloatError::NegOverflow(v)) ++ } else if !(v <= $bound as $from) { ++ dst == Err(conv::FloatError::PosOverflow(v)) ++ } else { ++ dst == Ok(v as $to) ++ } ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qv {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qv: (, $bound:expr); $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ println!("? {} => {}, qv: (, {})", SL!($from), SL!($to), SL!($bound)); ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, conv::FloatError<_>> = v.value_into().map_err(From::from); ++ if !(v <= $bound as $from) { ++ dst == Err(conv::FloatError::PosOverflow(v)) ++ } else { ++ dst == Ok(v as $to) ++ } ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qv {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qv: +; $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ println!("? {} => {}, qv: +", SL!($from), SL!($to)); ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, conv::FloatError<_>> = v.value_into().map_err(From::from); ++ if !(0 <= v) { ++ dst == Err(conv::FloatError::NegOverflow(v)) ++ } else { ++ dst == Ok(v as $to) ++ } ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qv {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qv: +$max:ty=> $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ println!("? 
{} => {}, qv: +{}", SL!($from), SL!($to), SL!($max)); ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, conv::FloatError<_>> = v.value_into().map_err(From::from); ++ if !(v <= <$max>::max_value() as $from) { ++ dst == Err(conv::FloatError::PosOverflow(v)) ++ } else { ++ dst == Ok(v as $to) ++ } ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qv {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qv: $bound:ty=> $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ println!("? {} => {}, qv: {}", SL!($from), SL!($to), SL!($bound)); ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, conv::FloatError<_>> = v.value_into().map_err(From::from); ++ if !(<$bound>::min_value() as $from <= v) { ++ dst == Err(conv::FloatError::NegOverflow(v)) ++ } else if !(v <= <$bound>::max_value() as $from) { ++ dst == Err(conv::FloatError::PosOverflow(v)) ++ } else { ++ dst == Ok(v as $to) ++ } ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qv {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qv: $min:ty, $max:ty=> $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ println!("? 
{} => {}, qv: {}, {}", SL!($from), SL!($to), SL!($min), SL!($max)); ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, conv::FloatError<_>> = v.value_into().map_err(From::from); ++ if !(<$min>::min_value() as $from <= v) { ++ dst == Err(conv::FloatError::NegOverflow(v)) ++ } else if !(v <= <$max>::max_value() as $from) { ++ dst == Err(conv::FloatError::PosOverflow(v)) ++ } else { ++ dst == Ok(v as $to) ++ } ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qv {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> a: $src:expr, !$dst:expr; $($tail:tt)*) => { ++ { ++ println!("? {} => {}, a: {}, !{}", SL!($from), SL!($to), SL!($src), SL!($dst)); ++ let src: $from = $src; ++ let dst: Result<$to, _> = src.approx_as(); ++ assert_eq!(dst, Err($dst(src))); ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> a: $src:expr, $dst:expr; $($tail:tt)*) => { ++ { ++ println!("? {} => {}, a: {}, {}", SL!($from), SL!($to), SL!($src), SL!($dst)); ++ let src: $from = $src; ++ let dst: Result<$to, _> = src.approx_as(); ++ assert_eq!(dst, Ok($dst)); ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> a: $src:expr; $($tail:tt)*) => { ++ { ++ println!("? {} => {}, a: {}", SL!($from), SL!($to), SL!($src)); ++ let src: $from = $src; ++ let dst: Result<$to, _> = src.approx_as(); ++ assert_eq!(dst, Ok($src as $to)); ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qa: *; $($tail:tt)*) => { ++ { ++ println!("? 
{} => {}, qa: *", SL!($from), SL!($to)); ++ extern crate quickcheck; ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, _> = v.approx_as(); ++ dst == Ok(v as $to) ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qa {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qa: +; $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ println!("? {} => {}, qa: +", SL!($from), SL!($to)); ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, conv::FloatError<_>> = v.approx_as().map_err(From::from); ++ if !(0 <= v) { ++ dst == Err(conv::FloatError::NegOverflow(v)) ++ } else { ++ dst == Ok(v as $to) ++ } ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qa {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qa: +$max:ty=> $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ println!("? {} => {}, qa: +{}", SL!($from), SL!($to), SL!($max)); ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, conv::FloatError<_>> = v.approx_as().map_err(From::from); ++ if !(v <= <$max>::max_value() as $from) { ++ dst == Err(conv::FloatError::PosOverflow(v)) ++ } else { ++ dst == Ok(v as $to) ++ } ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qa {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qa: $bound:ty=> $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ println!("? 
{} => {}, qa: {}", SL!($from), SL!($to), SL!($bound)); ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, conv::FloatError<_>> = v.approx_as().map_err(From::from); ++ if !(<$bound>::min_value() as $from <= v) { ++ dst == Err(conv::FloatError::NegOverflow(v)) ++ } else if !(v <= <$bound>::max_value() as $from) { ++ dst == Err(conv::FloatError::PosOverflow(v)) ++ } else { ++ dst == Ok(v as $to) ++ } ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qa {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> qaW: *; $($tail:tt)*) => { ++ { ++ extern crate quickcheck; ++ println!("? {} => {}, qaW: *", SL!($from), SL!($to)); ++ ++ fn property(v: $from) -> bool { ++ let dst: Result<$to, _> = v.approx_as_by::<_, Wrapping>(); ++ dst == Ok(v as $to) ++ } ++ ++ let mut qc = quickcheck::QuickCheck::new(); ++ match qc.quicktest(property as fn($from) -> bool) { ++ Ok(_) => (), ++ Err(err) => panic!("qaW {:?}", err) ++ } ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> aRTN: $src:expr, $dst:expr; $($tail:tt)*) => { ++ { ++ println!("? {} => {}, aRTN: {}, {}", SL!($from), SL!($to), SL!($src), SL!($dst)); ++ let src: $from = $src; ++ let dst: Result<$to, _> = src.approx_by::(); ++ assert_eq!(dst, Ok($dst)); ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> aRNI: $src:expr, $dst:expr; $($tail:tt)*) => { ++ { ++ println!("? {} => {}, aRNI: {}, {}", SL!($from), SL!($to), SL!($src), SL!($dst)); ++ let src: $from = $src; ++ let dst: Result<$to, _> = src.approx_by::(); ++ assert_eq!(dst, Ok($dst)); ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> aRPI: $src:expr, $dst:expr; $($tail:tt)*) => { ++ { ++ println!("? 
{} => {}, aRPI: {}, {}", SL!($from), SL!($to), SL!($src), SL!($dst)); ++ let src: $from = $src; ++ let dst: Result<$to, _> = src.approx_by::(); ++ assert_eq!(dst, Ok($dst)); ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ (@ $from:ty, $to:ty=> aRTZ: $src:expr, $dst:expr; $($tail:tt)*) => { ++ { ++ println!("? {} => {}, aRTZ: {}, {}", SL!($from), SL!($to), SL!($src), SL!($dst)); ++ let src: $from = $src; ++ let dst: Result<$to, _> = src.approx_by::(); ++ assert_eq!(dst, Ok($dst)); ++ } ++ check!(@ $from, $to=> $($tail)*); ++ }; ++ ++ ($from:ty, $to:ty=> $($tail:tt)*) => { ++ check! { @ $from, $to=> $($tail)*; } ++ }; ++} ++ ++macro_rules! for_bitness { ++ (32 {$($bits32:tt)*} 64 {$($bits64:tt)*}) => { ++ as_expr!( ++ { ++ #[cfg(target_pointer_width="32")] ++ fn for_bitness() { ++ $($bits32)* ++ } ++ ++ #[cfg(target_pointer_width="64")] ++ fn for_bitness() { ++ $($bits64)* ++ } ++ ++ for_bitness() ++ } ++ ) ++ }; ++} diff --cc vendor/core-foundation-0.4.4/.cargo-checksum.json index 000000000,000000000..d372e42f7 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"5909502e547762013619f4c4e01cc7393c20fe2d52d7fa471c1210adb2320dc7"} diff --cc vendor/core-foundation-0.4.4/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/.cargo-ok diff --cc vendor/core-foundation-0.4.4/Cargo.toml index 000000000,000000000..b20557ccc new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/Cargo.toml @@@ -1,0 -1,0 +1,29 @@@ ++# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO ++# ++# When uploading crates to the registry Cargo will automatically ++# "normalize" Cargo.toml files for maximal compatibility ++# with all versions of Cargo and also rewrite `path` dependencies ++# to registry (e.g. 
crates.io) dependencies ++# ++# If you believe there's an error in this file please file an ++# issue against the rust-lang/cargo repository. If you're ++# editing this file be aware that the upstream Cargo.toml ++# will likely look very different (and much more reasonable) ++ ++[package] ++name = "core-foundation" ++version = "0.4.4" ++authors = ["The Servo Project Developers"] ++description = "Bindings to Core Foundation for OS X" ++homepage = "https://github.com/servo/core-foundation-rs" ++license = "MIT / Apache-2.0" ++repository = "https://github.com/servo/core-foundation-rs" ++[dependencies.libc] ++version = "0.2" ++ ++[dependencies.core-foundation-sys] ++version = "0.4.4" ++ ++[features] ++mac_os_10_8_features = ["core-foundation-sys/mac_os_10_8_features"] ++mac_os_10_7_support = ["core-foundation-sys/mac_os_10_7_support"] diff --cc vendor/core-foundation-0.4.4/src/array.rs index 000000000,000000000..3859b0a5b new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/array.rs @@@ -1,0 -1,0 +1,160 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! Heterogeneous immutable arrays. ++ ++pub use core_foundation_sys::array::*; ++pub use core_foundation_sys::base::{CFIndex, CFRelease}; ++use core_foundation_sys::base::{CFTypeRef, kCFAllocatorDefault}; ++use libc::c_void; ++use std::mem; ++ ++use base::{CFIndexConvertible, TCFType, CFRange}; ++ ++/// A heterogeneous immutable array. 
++pub struct CFArray(CFArrayRef); ++ ++impl Drop for CFArray { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++pub struct CFArrayIterator<'a> { ++ array: &'a CFArray, ++ index: CFIndex, ++} ++ ++impl<'a> Iterator for CFArrayIterator<'a> { ++ type Item = *const c_void; ++ ++ fn next(&mut self) -> Option<*const c_void> { ++ if self.index >= self.array.len() { ++ None ++ } else { ++ let value = self.array.get(self.index); ++ self.index += 1; ++ Some(value) ++ } ++ } ++} ++ ++impl_TCFType!(CFArray, CFArrayRef, CFArrayGetTypeID); ++ ++impl CFArray { ++ /// Creates a new `CFArray` with the given elements, which must be `CFType` objects. ++ pub fn from_CFTypes(elems: &[T]) -> CFArray where T: TCFType { ++ unsafe { ++ let elems: Vec = elems.iter().map(|elem| elem.as_CFTypeRef()).collect(); ++ let array_ref = CFArrayCreate(kCFAllocatorDefault, ++ mem::transmute(elems.as_ptr()), ++ elems.len().to_CFIndex(), ++ &kCFTypeArrayCallBacks); ++ TCFType::wrap_under_create_rule(array_ref) ++ } ++ } ++ ++ /// Iterates over the elements of this `CFArray`. ++ /// ++ /// Careful; the loop body must wrap the reference properly. Generally, when array elements are ++ /// Core Foundation objects (not always true), they need to be wrapped with ++ /// `TCFType::wrap_under_get_rule()`. 
++ #[inline] ++ pub fn iter<'a>(&'a self) -> CFArrayIterator<'a> { ++ CFArrayIterator { ++ array: self, ++ index: 0 ++ } ++ } ++ ++ #[inline] ++ pub fn len(&self) -> CFIndex { ++ unsafe { ++ CFArrayGetCount(self.0) ++ } ++ } ++ ++ #[inline] ++ pub fn get(&self, index: CFIndex) -> *const c_void { ++ assert!(index < self.len()); ++ unsafe { ++ CFArrayGetValueAtIndex(self.0, index) ++ } ++ } ++ ++ pub fn get_values(&self, range: CFRange) -> Vec<*const c_void> { ++ let mut vec = Vec::with_capacity(range.length as usize); ++ unsafe { ++ CFArrayGetValues(self.0, range, vec.as_mut_ptr()); ++ vec.set_len(range.length as usize); ++ vec ++ } ++ } ++ ++ pub fn get_all_values(&self) -> Vec<*const c_void> { ++ self.get_values(CFRange { ++ location: 0, ++ length: self.len() ++ }) ++ } ++} ++ ++impl<'a> IntoIterator for &'a CFArray { ++ type Item = *const c_void; ++ type IntoIter = CFArrayIterator<'a>; ++ ++ fn into_iter(self) -> CFArrayIterator<'a> { ++ self.iter() ++ } ++} ++ ++#[test] ++fn should_box_and_unbox() { ++ use number::{CFNumber, number}; ++ ++ let n1 = number(1); ++ let n2 = number(2); ++ let n3 = number(3); ++ let n4 = number(4); ++ let n5 = number(5); ++ ++ let arr = CFArray::from_CFTypes(&[ ++ n1.as_CFType(), ++ n2.as_CFType(), ++ n3.as_CFType(), ++ n4.as_CFType(), ++ n5.as_CFType(), ++ ]); ++ ++ assert!(arr.get_all_values() == &[n1.as_CFTypeRef(), ++ n2.as_CFTypeRef(), ++ n3.as_CFTypeRef(), ++ n4.as_CFTypeRef(), ++ n5.as_CFTypeRef()]); ++ ++ unsafe { ++ let mut sum = 0; ++ ++ for elem in arr.iter() { ++ let number: CFNumber = TCFType::wrap_under_get_rule(mem::transmute(elem)); ++ sum += number.to_i64().unwrap() ++ } ++ ++ assert!(sum == 15); ++ ++ for elem in arr.iter() { ++ let number: CFNumber = TCFType::wrap_under_get_rule(mem::transmute(elem)); ++ sum += number.to_i64().unwrap() ++ } ++ ++ assert!(sum == 30); ++ } ++} diff --cc vendor/core-foundation-0.4.4/src/base.rs index 000000000,000000000..3f4bcea16 new file mode 100644 --- /dev/null +++ 
b/vendor/core-foundation-0.4.4/src/base.rs @@@ -1,0 -1,0 +1,142 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++pub use core_foundation_sys::base::*; ++ ++pub trait CFIndexConvertible { ++ /// Always use this method to construct a `CFIndex` value. It performs bounds checking to ++ /// ensure the value is in range. ++ fn to_CFIndex(self) -> CFIndex; ++} ++ ++impl CFIndexConvertible for usize { ++ #[inline] ++ fn to_CFIndex(self) -> CFIndex { ++ let max_CFIndex = CFIndex::max_value(); ++ if self > (max_CFIndex as usize) { ++ panic!("value out of range") ++ } ++ self as CFIndex ++ } ++} ++ ++/// Superclass of all Core Foundation objects. ++pub struct CFType(CFTypeRef); ++ ++impl Clone for CFType { ++ #[inline] ++ fn clone(&self) -> CFType { ++ unsafe { ++ TCFType::wrap_under_get_rule(self.0) ++ } ++ } ++} ++ ++impl Drop for CFType { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.0) ++ } ++ } ++} ++ ++/// All Core Foundation types implement this trait. The type parameter `TypeRef` specifies the ++/// associated Core Foundation type: e.g. for `CFType` this is `CFTypeRef`; for `CFArray` this is ++/// `CFArrayRef`. ++pub trait TCFType { ++ /// Returns the object as its concrete TypeRef. ++ fn as_concrete_TypeRef(&self) -> ConcreteTypeRef; ++ ++ /// Returns an instance of the object, wrapping the underlying `CFTypeRef` subclass. Use this ++ /// when following Core Foundation's "Create Rule". The reference count is *not* bumped. ++ unsafe fn wrap_under_create_rule(obj: ConcreteTypeRef) -> Self; ++ ++ /// Returns the type ID for this class. ++ fn type_id() -> CFTypeID; ++ ++ /// Returns the object as a wrapped `CFType`. The reference count is incremented by one. 
++ #[inline] ++ fn as_CFType(&self) -> CFType { ++ unsafe { ++ TCFType::wrap_under_get_rule(self.as_CFTypeRef()) ++ } ++ } ++ ++ /// Returns the object as a raw `CFTypeRef`. The reference count is not adjusted. ++ fn as_CFTypeRef(&self) -> CFTypeRef; ++ ++ /// Returns an instance of the object, wrapping the underlying `CFTypeRef` subclass. Use this ++ /// when following Core Foundation's "Get Rule". The reference count *is* bumped. ++ unsafe fn wrap_under_get_rule(reference: ConcreteTypeRef) -> Self; ++ ++ /// Returns the reference count of the object. It is unwise to do anything other than test ++ /// whether the return value of this method is greater than zero. ++ #[inline] ++ fn retain_count(&self) -> CFIndex { ++ unsafe { ++ CFGetRetainCount(self.as_CFTypeRef()) ++ } ++ } ++ ++ /// Returns the type ID of this object. ++ #[inline] ++ fn type_of(&self) -> CFTypeID { ++ unsafe { ++ CFGetTypeID(self.as_CFTypeRef()) ++ } ++ } ++ ++ /// Writes a debugging version of this object on standard error. ++ fn show(&self) { ++ unsafe { ++ CFShow(self.as_CFTypeRef()) ++ } ++ } ++ ++ /// Returns true if this value is an instance of another type. ++ #[inline] ++ fn instance_of>(&self) -> bool { ++ self.type_of() == >::type_id() ++ } ++} ++ ++impl TCFType for CFType { ++ #[inline] ++ fn as_concrete_TypeRef(&self) -> CFTypeRef { ++ self.0 ++ } ++ ++ #[inline] ++ unsafe fn wrap_under_get_rule(reference: CFTypeRef) -> CFType { ++ let reference: CFTypeRef = CFRetain(reference); ++ TCFType::wrap_under_create_rule(reference) ++ } ++ ++ #[inline] ++ fn as_CFTypeRef(&self) -> CFTypeRef { ++ self.as_concrete_TypeRef() ++ } ++ ++ #[inline] ++ unsafe fn wrap_under_create_rule(obj: CFTypeRef) -> CFType { ++ CFType(obj) ++ } ++ ++ #[inline] ++ fn type_id() -> CFTypeID { ++ // FIXME(pcwalton): Is this right? ++ 0 ++ } ++ ++ #[inline] ++ fn instance_of>(&self) -> bool { ++ // Since this is the root of the type hierarchy, we always answer yes. 
++ true ++ } ++} diff --cc vendor/core-foundation-0.4.4/src/boolean.rs index 000000000,000000000..8e6ca3bf8 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/boolean.rs @@@ -1,0 -1,0 +1,44 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! A Boolean type. ++ ++use core_foundation_sys::base::{CFRelease}; ++pub use core_foundation_sys::number::{CFBooleanRef, CFBooleanGetTypeID, kCFBooleanTrue, kCFBooleanFalse}; ++ ++use base::TCFType; ++ ++/// A Boolean type. ++/// ++/// FIXME(pcwalton): Should be a newtype struct, but that fails due to a Rust compiler bug. ++pub struct CFBoolean(CFBooleanRef); ++ ++impl Drop for CFBoolean { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++impl_TCFType!(CFBoolean, CFBooleanRef, CFBooleanGetTypeID); ++ ++impl CFBoolean { ++ pub fn true_value() -> CFBoolean { ++ unsafe { ++ TCFType::wrap_under_get_rule(kCFBooleanTrue) ++ } ++ } ++ ++ pub fn false_value() -> CFBoolean { ++ unsafe { ++ TCFType::wrap_under_get_rule(kCFBooleanFalse) ++ } ++ } ++} diff --cc vendor/core-foundation-0.4.4/src/bundle.rs index 000000000,000000000..4d75c302f new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/bundle.rs @@@ -1,0 -1,0 +1,127 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! 
Core Foundation Bundle Type ++ ++pub use core_foundation_sys::bundle::*; ++use core_foundation_sys::base::{CFRelease, kCFAllocatorDefault}; ++ ++use base::TCFType; ++use url::CFURL; ++use dictionary::CFDictionary; ++ ++/// A Bundle type. ++pub struct CFBundle(CFBundleRef); ++ ++impl Drop for CFBundle { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++impl CFBundle { ++ pub fn new(bundleURL: CFURL) -> Option { ++ unsafe { ++ let bundle_ref = CFBundleCreate(kCFAllocatorDefault, bundleURL.as_concrete_TypeRef()); ++ if bundle_ref.is_null() { ++ None ++ } else { ++ Some(TCFType::wrap_under_create_rule(bundle_ref)) ++ } ++ } ++ } ++ ++ pub fn main_bundle() -> CFBundle { ++ unsafe { ++ let bundle_ref = CFBundleGetMainBundle(); ++ TCFType::wrap_under_get_rule(bundle_ref) ++ } ++ } ++ ++ pub fn info_dictionary(&self) -> CFDictionary { ++ unsafe { ++ let info_dictionary = CFBundleGetInfoDictionary(self.0); ++ TCFType::wrap_under_get_rule(info_dictionary) ++ } ++ } ++ ++ pub fn executable_url(&self) -> Option { ++ unsafe { ++ let exe_url = CFBundleCopyExecutableURL(self.0); ++ if exe_url.is_null() { ++ None ++ } else { ++ Some(TCFType::wrap_under_create_rule(exe_url)) ++ } ++ } ++ } ++ ++ pub fn private_frameworks_url(&self) -> Option { ++ unsafe { ++ let fw_url = CFBundleCopyPrivateFrameworksURL(self.0); ++ if fw_url.is_null() { ++ None ++ } else { ++ Some(TCFType::wrap_under_create_rule(fw_url)) ++ } ++ } ++ } ++} ++ ++impl_TCFType!(CFBundle, CFBundleRef, CFBundleGetTypeID); ++ ++#[test] ++fn safari_executable_url() { ++ use string::CFString; ++ use url::{CFURL, kCFURLPOSIXPathStyle}; ++ ++ let cfstr_path = CFString::from_static_string("/Applications/Safari.app"); ++ let cfurl_path = CFURL::from_file_system_path(cfstr_path, kCFURLPOSIXPathStyle, true); ++ let cfurl_executable = CFBundle::new(cfurl_path) ++ .expect("Safari not present") ++ .executable_url(); ++ assert!(cfurl_executable.is_some()); ++ assert_eq!(cfurl_executable ++ 
.unwrap() ++ .absolute() ++ .get_file_system_path(kCFURLPOSIXPathStyle) ++ .to_string(), ++ "/Applications/Safari.app/Contents/MacOS/Safari"); ++} ++ ++#[test] ++fn safari_private_frameworks_url() { ++ use string::CFString; ++ use url::{CFURL, kCFURLPOSIXPathStyle}; ++ ++ let cfstr_path = CFString::from_static_string("/Applications/Safari.app"); ++ let cfurl_path = CFURL::from_file_system_path(cfstr_path, kCFURLPOSIXPathStyle, true); ++ let cfurl_executable = CFBundle::new(cfurl_path) ++ .expect("Safari not present") ++ .private_frameworks_url(); ++ assert!(cfurl_executable.is_some()); ++ assert_eq!(cfurl_executable ++ .unwrap() ++ .absolute() ++ .get_file_system_path(kCFURLPOSIXPathStyle) ++ .to_string(), ++ "/Applications/Safari.app/Contents/Frameworks"); ++} ++ ++#[test] ++fn non_existant_bundle() { ++ use string::CFString; ++ use url::{CFURL, kCFURLPOSIXPathStyle}; ++ ++ let cfstr_path = CFString::from_static_string("/usr/local/foo"); ++ let cfurl_path = CFURL::from_file_system_path(cfstr_path, kCFURLPOSIXPathStyle, true); ++ assert!(CFBundle::new(cfurl_path).is_none()); ++} diff --cc vendor/core-foundation-0.4.4/src/data.rs index 000000000,000000000..7d83a615c new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/data.rs @@@ -1,0 -1,0 +1,68 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! Core Foundation byte buffers. ++ ++pub use core_foundation_sys::data::*; ++use core_foundation_sys::base::{CFIndex, CFRelease}; ++use core_foundation_sys::base::{kCFAllocatorDefault}; ++use std::ops::Deref; ++use std::slice; ++ ++use base::{CFIndexConvertible, TCFType}; ++ ++/// A byte buffer. 
++pub struct CFData(CFDataRef); ++ ++impl Drop for CFData { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++impl_TCFType!(CFData, CFDataRef, CFDataGetTypeID); ++ ++impl CFData { ++ pub fn from_buffer(buffer: &[u8]) -> CFData { ++ unsafe { ++ let data_ref = CFDataCreate(kCFAllocatorDefault, ++ buffer.as_ptr(), ++ buffer.len().to_CFIndex()); ++ TCFType::wrap_under_create_rule(data_ref) ++ } ++ } ++ ++ /// Returns a pointer to the underlying bytes in this data. Note that this byte buffer is ++ /// read-only. ++ #[inline] ++ pub fn bytes<'a>(&'a self) -> &'a [u8] { ++ unsafe { ++ slice::from_raw_parts(CFDataGetBytePtr(self.0), self.len() as usize) ++ } ++ } ++ ++ /// Returns the length of this byte buffer. ++ #[inline] ++ pub fn len(&self) -> CFIndex { ++ unsafe { ++ CFDataGetLength(self.0) ++ } ++ } ++} ++ ++impl Deref for CFData { ++ type Target = [u8]; ++ ++ #[inline] ++ fn deref(&self) -> &[u8] { ++ self.bytes() ++ } ++} diff --cc vendor/core-foundation-0.4.4/src/dictionary.rs index 000000000,000000000..9953c4b10 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/dictionary.rs @@@ -1,0 -1,0 +1,118 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! Dictionaries of key-value pairs. ++ ++pub use core_foundation_sys::dictionary::*; ++use core_foundation_sys::base::CFRelease; ++use core_foundation_sys::base::{CFTypeRef, kCFAllocatorDefault}; ++use libc::c_void; ++use std::mem; ++use std::ptr; ++ ++use base::{CFType, CFIndexConvertible, TCFType}; ++ ++/// An immutable dictionary of key-value pairs. 
++pub struct CFDictionary(CFDictionaryRef); ++ ++impl Drop for CFDictionary { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++impl_TCFType!(CFDictionary, CFDictionaryRef, CFDictionaryGetTypeID); ++ ++impl CFDictionary { ++ pub fn from_CFType_pairs(pairs: &[(K, V)]) -> CFDictionary ++ where K: TCFType, V: TCFType { ++ let (keys, values): (Vec,Vec) = ++ pairs.iter() ++ .map(|&(ref key, ref value)| (key.as_CFTypeRef(), value.as_CFTypeRef())) ++ .unzip(); ++ ++ unsafe { ++ let dictionary_ref = CFDictionaryCreate(kCFAllocatorDefault, ++ mem::transmute(keys.as_ptr()), ++ mem::transmute(values.as_ptr()), ++ keys.len().to_CFIndex(), ++ &kCFTypeDictionaryKeyCallBacks, ++ &kCFTypeDictionaryValueCallBacks); ++ TCFType::wrap_under_create_rule(dictionary_ref) ++ } ++ } ++ ++ #[inline] ++ pub fn len(&self) -> usize { ++ unsafe { ++ CFDictionaryGetCount(self.0) as usize ++ } ++ } ++ ++ #[inline] ++ pub fn is_empty(&self) -> bool { ++ self.len() == 0 ++ } ++ ++ #[inline] ++ pub fn contains_key(&self, key: *const c_void) -> bool { ++ unsafe { ++ CFDictionaryContainsKey(self.0, key) != 0 ++ } ++ } ++ ++ #[inline] ++ pub fn find(&self, key: *const c_void) -> Option<*const c_void> { ++ unsafe { ++ let mut value: *const c_void = ptr::null(); ++ if CFDictionaryGetValueIfPresent(self.0, key, &mut value) != 0 { ++ Some(value) ++ } else { ++ None ++ } ++ } ++ } ++ ++ #[inline] ++ pub fn get(&self, key: *const c_void) -> *const c_void { ++ let value = self.find(key); ++ if value.is_none() { ++ panic!("No entry found for key {:p}", key); ++ } ++ value.unwrap() ++ } ++ ++ /// A convenience function to retrieve `CFType` instances. 
++ #[inline] ++ pub unsafe fn get_CFType(&self, key: *const c_void) -> CFType { ++ let value: CFTypeRef = mem::transmute(self.get(key)); ++ TCFType::wrap_under_get_rule(value) ++ } ++ ++ #[inline] ++ pub unsafe fn set_value(&self, key: *const c_void, value: *const c_void) { ++ CFDictionarySetValue(self.0, key, value) ++ } ++ ++ pub fn get_keys_and_values(&self) -> (Vec<*const c_void>, Vec<*const c_void>) { ++ let length = self.len(); ++ let mut keys = Vec::with_capacity(length); ++ let mut values = Vec::with_capacity(length); ++ ++ unsafe { ++ CFDictionaryGetKeysAndValues(self.0, keys.as_mut_ptr(), values.as_mut_ptr()); ++ keys.set_len(length); ++ values.set_len(length); ++ } ++ ++ (keys, values) ++ } ++} diff --cc vendor/core-foundation-0.4.4/src/error.rs index 000000000,000000000..af9b26f4e new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/error.rs @@@ -1,0 -1,0 +1,77 @@@ ++// Copyright 2016 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! Core Foundation errors. ++ ++use core_foundation_sys::error::*; ++use core_foundation_sys::base::CFRelease; ++use std::error::Error; ++use std::fmt; ++ ++use base::{CFIndex, TCFType}; ++use string::CFString; ++ ++/// An error value. 
++pub struct CFError(CFErrorRef); ++ ++impl Drop for CFError { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++impl_TCFType!(CFError, CFErrorRef, CFErrorGetTypeID); ++ ++impl fmt::Debug for CFError { ++ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { ++ fmt.debug_struct("CFError") ++ .field("domain", &self.domain()) ++ .field("code", &self.code()) ++ .field("description", &self.description()) ++ .finish() ++ } ++} ++ ++impl fmt::Display for CFError { ++ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { ++ write!(fmt, "{}", self.description()) ++ } ++} ++ ++impl Error for CFError { ++ fn description(&self) -> &str { ++ "a Core Foundation error" ++ } ++} ++ ++impl CFError { ++ /// Returns a string identifying the domain with which this error is ++ /// associated. ++ pub fn domain(&self) -> CFString { ++ unsafe { ++ let s = CFErrorGetDomain(self.0); ++ CFString::wrap_under_get_rule(s) ++ } ++ } ++ ++ /// Returns the code identifying this type of error. ++ pub fn code(&self) -> CFIndex { ++ unsafe { CFErrorGetCode(self.0) } ++ } ++ ++ /// Returns a human-presentable description of the error. ++ pub fn description(&self) -> CFString { ++ unsafe { ++ let s = CFErrorCopyDescription(self.0); ++ CFString::wrap_under_create_rule(s) ++ } ++ } ++} diff --cc vendor/core-foundation-0.4.4/src/lib.rs index 000000000,000000000..f4ca71d83 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/lib.rs @@@ -1,0 -1,0 +1,98 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++#![allow(non_snake_case)] ++ ++extern crate core_foundation_sys; ++extern crate libc; ++ ++#[macro_export] ++macro_rules! 
impl_TCFType { ++ ($ty:ident, $raw:ident, $ty_id:ident) => { ++ impl $crate::base::TCFType<$raw> for $ty { ++ #[inline] ++ fn as_concrete_TypeRef(&self) -> $raw { ++ self.0 ++ } ++ ++ #[inline] ++ unsafe fn wrap_under_get_rule(reference: $raw) -> $ty { ++ let reference = ::std::mem::transmute(::core_foundation_sys::base::CFRetain(::std::mem::transmute(reference))); ++ $crate::base::TCFType::wrap_under_create_rule(reference) ++ } ++ ++ #[inline] ++ fn as_CFTypeRef(&self) -> ::core_foundation_sys::base::CFTypeRef { ++ unsafe { ++ ::std::mem::transmute(self.as_concrete_TypeRef()) ++ } ++ } ++ ++ #[inline] ++ unsafe fn wrap_under_create_rule(obj: $raw) -> $ty { ++ $ty(obj) ++ } ++ ++ #[inline] ++ fn type_id() -> ::core_foundation_sys::base::CFTypeID { ++ unsafe { ++ $ty_id() ++ } ++ } ++ } ++ } ++} ++ ++pub mod array; ++pub mod base; ++pub mod boolean; ++pub mod data; ++pub use core_foundation_sys::date; // back compat ++pub mod dictionary; ++pub mod error; ++pub mod number; ++pub mod set; ++pub mod string; ++pub mod url; ++pub mod bundle; ++pub mod propertylist; ++pub mod runloop; ++ ++#[cfg(test)] ++pub mod test { ++ #[test] ++ fn test_stuff() { ++ use base::TCFType; ++ use boolean::CFBoolean; ++ use number::number; ++ use dictionary::CFDictionary; ++ use string::CFString; ++ ++ /*let n = CFNumber::new_number(42 as i32); ++ io::println(format!("%d", (&n).retain_count() as int)); ++ (&n).show();*/ ++ ++ let bar = CFString::from_static_string("Bar"); ++ let baz = CFString::from_static_string("Baz"); ++ let boo = CFString::from_static_string("Boo"); ++ let foo = CFString::from_static_string("Foo"); ++ let tru = CFBoolean::true_value(); ++ let n42 = number(42); ++ ++ let d = CFDictionary::from_CFType_pairs(&[ ++ (bar.as_CFType(), boo.as_CFType()), ++ (baz.as_CFType(), tru.as_CFType()), ++ (foo.as_CFType(), n42.as_CFType()), ++ ]); ++ ++ let (v1, v2) = d.get_keys_and_values(); ++ ++ assert!(v1 == &[bar.as_CFTypeRef(), baz.as_CFTypeRef(), foo.as_CFTypeRef()]); ++ 
assert!(v2 == &[boo.as_CFTypeRef(), tru.as_CFTypeRef(), n42.as_CFTypeRef()]); ++ } ++} diff --cc vendor/core-foundation-0.4.4/src/number.rs index 000000000,000000000..771c75eb9 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/number.rs @@@ -1,0 -1,0 +1,85 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! Immutable numbers. ++ ++use core_foundation_sys::base::{CFRelease, kCFAllocatorDefault}; ++pub use core_foundation_sys::number::*; ++use std::mem; ++ ++use base::{TCFType}; ++ ++/// An immutable numeric value. ++pub struct CFNumber(CFNumberRef); ++ ++impl Drop for CFNumber { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++impl_TCFType!(CFNumber, CFNumberRef, CFNumberGetTypeID); ++ ++// TODO(pcwalton): Floating point. 
++impl CFNumber { ++ #[inline] ++ pub fn from_i32(value: i32) -> CFNumber { ++ unsafe { ++ let number_ref = CFNumberCreate(kCFAllocatorDefault, ++ kCFNumberSInt32Type, ++ mem::transmute(&value)); ++ TCFType::wrap_under_create_rule(number_ref) ++ } ++ } ++ ++ #[inline] ++ pub fn to_i64(&self) -> Option { ++ unsafe { ++ let mut value: i64 = 0; ++ let ok = CFNumberGetValue(self.0, kCFNumberSInt64Type, mem::transmute(&mut value)); ++ if ok { Some(value) } else { None } ++ } ++ } ++ ++ #[inline] ++ pub fn to_f64(&self) -> Option { ++ unsafe { ++ let mut value: f64 = 0.0; ++ let ok = CFNumberGetValue(self.0, kCFNumberFloat64Type, mem::transmute(&mut value)); ++ if ok { Some(value) } else { None } ++ } ++ } ++ ++ #[inline] ++ pub fn from_i64(value: i64) -> CFNumber { ++ unsafe { ++ let number_ref = CFNumberCreate(kCFAllocatorDefault, ++ kCFNumberSInt64Type, ++ mem::transmute(&value)); ++ TCFType::wrap_under_create_rule(number_ref) ++ } ++ } ++ ++ #[inline] ++ pub fn from_f64(value: f64) -> CFNumber { ++ unsafe { ++ let number_ref = CFNumberCreate(kCFAllocatorDefault, ++ kCFNumberFloat64Type, ++ mem::transmute(&value)); ++ TCFType::wrap_under_create_rule(number_ref) ++ } ++ } ++} ++ ++/// A convenience function to create CFNumbers. ++pub fn number(value: i64) -> CFNumber { ++ CFNumber::from_i64(value) ++} diff --cc vendor/core-foundation-0.4.4/src/propertylist.rs index 000000000,000000000..9b175fe4c new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/propertylist.rs @@@ -1,0 -1,0 +1,87 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! 
Core Foundation property lists ++ ++use std::ptr; ++ ++use libc::c_void; ++ ++use error::CFError; ++use data::CFData; ++use base::{TCFType}; ++ ++pub use core_foundation_sys::propertylist::*; ++use core_foundation_sys::error::CFErrorRef; ++use core_foundation_sys::base::{kCFAllocatorDefault}; ++ ++pub fn create_with_data(data: CFData, ++ options: CFPropertyListMutabilityOptions) ++ -> Result<(*const c_void, CFPropertyListFormat), CFError> { ++ unsafe { ++ let mut error: CFErrorRef = ptr::null_mut(); ++ let mut format: CFPropertyListFormat = 0; ++ let property_list = CFPropertyListCreateWithData(kCFAllocatorDefault, ++ data.as_concrete_TypeRef(), ++ options, ++ &mut format, ++ &mut error); ++ if property_list.is_null() { ++ Err(TCFType::wrap_under_create_rule(error)) ++ } else { ++ Ok((property_list, format)) ++ } ++ } ++} ++ ++pub fn create_data(property_list: *const c_void, format: CFPropertyListFormat) -> Result { ++ unsafe { ++ let mut error: CFErrorRef = ptr::null_mut(); ++ let data_ref = CFPropertyListCreateData(kCFAllocatorDefault, ++ property_list, ++ format, ++ 0, ++ &mut error); ++ if data_ref.is_null() { ++ Err(TCFType::wrap_under_create_rule(error)) ++ } else { ++ Ok(TCFType::wrap_under_create_rule(data_ref)) ++ } ++ } ++} ++ ++#[cfg(test)] ++pub mod test { ++ #[test] ++ fn test_property_list_serialization() { ++ use base::{TCFType, CFEqual}; ++ use boolean::CFBoolean; ++ use number::number; ++ use dictionary::CFDictionary; ++ use string::CFString; ++ use super::*; ++ ++ let bar = CFString::from_static_string("Bar"); ++ let baz = CFString::from_static_string("Baz"); ++ let boo = CFString::from_static_string("Boo"); ++ let foo = CFString::from_static_string("Foo"); ++ let tru = CFBoolean::true_value(); ++ let n42 = number(42); ++ ++ let dict1 = CFDictionary::from_CFType_pairs(&[(bar.as_CFType(), boo.as_CFType()), ++ (baz.as_CFType(), tru.as_CFType()), ++ (foo.as_CFType(), n42.as_CFType())]); ++ ++ let data = create_data(dict1.as_CFTypeRef(), 
kCFPropertyListXMLFormat_v1_0).unwrap(); ++ let (dict2, _) = create_with_data(data, kCFPropertyListImmutable).unwrap(); ++ unsafe { ++ assert!(CFEqual(dict1.as_CFTypeRef(), dict2) == 1); ++ } ++ } ++} diff --cc vendor/core-foundation-0.4.4/src/runloop.rs index 000000000,000000000..ccc1f78ed new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/runloop.rs @@@ -1,0 -1,0 +1,141 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++#![allow(non_upper_case_globals)] ++ ++pub use core_foundation_sys::runloop::*; ++use core_foundation_sys::base::{CFIndex, CFRelease}; ++use core_foundation_sys::base::{kCFAllocatorDefault, CFOptionFlags}; ++use core_foundation_sys::string::CFStringRef; ++use core_foundation_sys::date::{CFAbsoluteTime, CFTimeInterval}; ++ ++use base::{TCFType}; ++use string::{CFString}; ++ ++pub struct CFRunLoop(CFRunLoopRef); ++ ++impl Drop for CFRunLoop { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++impl_TCFType!(CFRunLoop, CFRunLoopRef, CFRunLoopGetTypeID); ++ ++impl CFRunLoop { ++ pub fn get_current() -> CFRunLoop { ++ unsafe { ++ let run_loop_ref = CFRunLoopGetCurrent(); ++ TCFType::wrap_under_get_rule(run_loop_ref) ++ } ++ } ++ ++ pub fn get_main() -> CFRunLoop { ++ unsafe { ++ let run_loop_ref = CFRunLoopGetMain(); ++ TCFType::wrap_under_get_rule(run_loop_ref) ++ } ++ } ++ ++ pub fn run_current() { ++ unsafe { ++ CFRunLoopRun(); ++ } ++ } ++ ++ pub fn stop(&self) { ++ unsafe { ++ CFRunLoopStop(self.0); ++ } ++ } ++ ++ pub fn current_mode(&self) -> Option { ++ unsafe { ++ let string_ref = CFRunLoopCopyCurrentMode(self.0); ++ if string_ref.is_null() { ++ return None; ++ } ++ ++ let cf_string: CFString = 
TCFType::wrap_under_create_rule(string_ref); ++ Some(cf_string.to_string()) ++ } ++ } ++ ++ pub fn contains_timer(&self, timer: &CFRunLoopTimer, mode: CFStringRef) -> bool { ++ unsafe { ++ CFRunLoopContainsTimer(self.0, timer.0, mode) != 0 ++ } ++ } ++ ++ pub fn add_timer(&self, timer: &CFRunLoopTimer, mode: CFStringRef) { ++ unsafe { ++ CFRunLoopAddTimer(self.0, timer.0, mode); ++ } ++ } ++ ++} ++ ++pub struct CFRunLoopTimer(CFRunLoopTimerRef); ++ ++impl Drop for CFRunLoopTimer { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++impl_TCFType!(CFRunLoopTimer, CFRunLoopTimerRef, CFRunLoopTimerGetTypeID); ++ ++impl CFRunLoopTimer { ++ pub fn new(fireDate: CFAbsoluteTime, interval: CFTimeInterval, flags: CFOptionFlags, order: CFIndex, callout: CFRunLoopTimerCallBack, context: *mut CFRunLoopTimerContext) -> CFRunLoopTimer { ++ unsafe { ++ let timer_ref = CFRunLoopTimerCreate(kCFAllocatorDefault, fireDate, interval, flags, order, callout, context); ++ TCFType::wrap_under_create_rule(timer_ref) ++ } ++ } ++} ++ ++#[cfg(test)] ++mod test { ++ use super::*; ++ use core_foundation_sys::date::{CFAbsoluteTime, CFAbsoluteTimeGetCurrent}; ++ use std::mem; ++ use libc::c_void; ++ ++ #[test] ++ fn wait_200_milliseconds() { ++ let run_loop = CFRunLoop::get_current(); ++ let mut now = unsafe { CFAbsoluteTimeGetCurrent() }; ++ let mut context = unsafe { CFRunLoopTimerContext { ++ version: 0, ++ info: mem::transmute(&mut now), ++ retain: mem::zeroed(), ++ release: mem::zeroed(), ++ copyDescription: mem::zeroed(), ++ } }; ++ ++ ++ let run_loop_timer = CFRunLoopTimer::new(now + 0.20f64, 0f64, 0, 0, timer_popped, &mut context); ++ unsafe { ++ run_loop.add_timer(&run_loop_timer, kCFRunLoopDefaultMode); ++ } ++ CFRunLoop::run_current(); ++ } ++ ++ extern "C" fn timer_popped(_timer: CFRunLoopTimerRef, _info: *mut c_void) { ++ let previous_now_ptr: *const CFAbsoluteTime = unsafe { mem::transmute(_info) }; ++ let previous_now = unsafe { 
*previous_now_ptr }; ++ let now = unsafe { CFAbsoluteTimeGetCurrent() }; ++ assert!(now - previous_now > 0.19 && now - previous_now < 0.21); ++ CFRunLoop::get_current().stop(); ++ } ++} diff --cc vendor/core-foundation-0.4.4/src/set.rs index 000000000,000000000..8224a2090 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/set.rs @@@ -1,0 -1,0 +1,45 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! An immutable bag of elements. ++ ++pub use core_foundation_sys::set::*; ++use core_foundation_sys::base::CFRelease; ++use core_foundation_sys::base::{CFTypeRef, kCFAllocatorDefault}; ++ ++use base::{CFIndexConvertible, TCFType}; ++ ++use std::mem; ++ ++/// An immutable bag of elements. ++pub struct CFSet(CFSetRef); ++ ++impl Drop for CFSet { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++impl_TCFType!(CFSet, CFSetRef, CFSetGetTypeID); ++ ++impl CFSet { ++ /// Creates a new set from a list of `CFType` instances. ++ pub fn from_slice(elems: &[T]) -> CFSet where T: TCFType { ++ unsafe { ++ let elems: Vec = elems.iter().map(|elem| elem.as_CFTypeRef()).collect(); ++ let set_ref = CFSetCreate(kCFAllocatorDefault, ++ mem::transmute(elems.as_ptr()), ++ elems.len().to_CFIndex(), ++ &kCFTypeSetCallBacks); ++ TCFType::wrap_under_create_rule(set_ref) ++ } ++ } ++} diff --cc vendor/core-foundation-0.4.4/src/string.rs index 000000000,000000000..051ca9a4e new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/string.rs @@@ -1,0 -1,0 +1,152 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. 
++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! Immutable strings. ++ ++pub use core_foundation_sys::string::*; ++ ++use base::{CFIndexConvertible, TCFType}; ++ ++use core_foundation_sys::base::{Boolean, CFIndex, CFRange, CFRelease}; ++use core_foundation_sys::base::{kCFAllocatorDefault, kCFAllocatorNull}; ++use std::fmt; ++use std::str::{self, FromStr}; ++use std::ptr; ++use std::ffi::CStr; ++ ++/// An immutable string in one of a variety of encodings. ++pub struct CFString(CFStringRef); ++ ++impl Clone for CFString { ++ #[inline] ++ fn clone(&self) -> CFString { ++ unsafe { ++ TCFType::wrap_under_get_rule(self.0) ++ } ++ } ++} ++ ++impl Drop for CFString { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++impl_TCFType!(CFString, CFStringRef, CFStringGetTypeID); ++ ++impl FromStr for CFString { ++ type Err = (); ++ ++ /// See also CFString::new for a variant of this which does not return a Result ++ #[inline] ++ fn from_str(string: &str) -> Result { ++ Ok(CFString::new(string)) ++ } ++} ++ ++impl fmt::Display for CFString { ++ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { ++ unsafe { ++ // Do this without allocating if we can get away with it ++ let c_string = CFStringGetCStringPtr(self.0, kCFStringEncodingUTF8); ++ if c_string != ptr::null() { ++ let c_str = CStr::from_ptr(c_string); ++ fmt.write_str(str::from_utf8_unchecked(c_str.to_bytes())) ++ } else { ++ let char_len = self.char_len(); ++ ++ // First, ask how big the buffer ought to be. ++ let mut bytes_required: CFIndex = 0; ++ CFStringGetBytes(self.0, ++ CFRange { location: 0, length: char_len }, ++ kCFStringEncodingUTF8, ++ 0, ++ false as Boolean, ++ ptr::null_mut(), ++ 0, ++ &mut bytes_required); ++ ++ // Then, allocate the buffer and actually copy. 
++ let mut buffer = vec![b'\x00'; bytes_required as usize]; ++ ++ let mut bytes_used: CFIndex = 0; ++ let chars_written = CFStringGetBytes(self.0, ++ CFRange { location: 0, length: char_len }, ++ kCFStringEncodingUTF8, ++ 0, ++ false as Boolean, ++ buffer.as_mut_ptr(), ++ buffer.len().to_CFIndex(), ++ &mut bytes_used) as usize; ++ assert!(chars_written.to_CFIndex() == char_len); ++ ++ // This is dangerous; we over-allocate and null-terminate the string (during ++ // initialization). ++ assert!(bytes_used == buffer.len().to_CFIndex()); ++ fmt.write_str(str::from_utf8_unchecked(&buffer)) ++ } ++ } ++ } ++} ++ ++impl fmt::Debug for CFString { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ write!(f, "\"{}\"", self) ++ } ++} ++ ++ ++impl CFString { ++ /// Creates a new `CFString` instance from a Rust string. ++ #[inline] ++ pub fn new(string: &str) -> CFString { ++ unsafe { ++ let string_ref = CFStringCreateWithBytes(kCFAllocatorDefault, ++ string.as_ptr(), ++ string.len().to_CFIndex(), ++ kCFStringEncodingUTF8, ++ false as Boolean, ++ kCFAllocatorNull); ++ CFString::wrap_under_create_rule(string_ref) ++ } ++ } ++ ++ /// Like `CFString::new`, but references a string that can be used as a backing store ++ /// by virtue of being statically allocated. ++ #[inline] ++ pub fn from_static_string(string: &'static str) -> CFString { ++ unsafe { ++ let string_ref = CFStringCreateWithBytesNoCopy(kCFAllocatorDefault, ++ string.as_ptr(), ++ string.len().to_CFIndex(), ++ kCFStringEncodingUTF8, ++ false as Boolean, ++ kCFAllocatorNull); ++ TCFType::wrap_under_create_rule(string_ref) ++ } ++ } ++ ++ /// Returns the number of characters in the string. 
++ #[inline] ++ pub fn char_len(&self) -> CFIndex { ++ unsafe { ++ CFStringGetLength(self.0) ++ } ++ } ++} ++ ++#[test] ++fn string_and_back() { ++ let original = "The quick brown fox jumped over the slow lazy dog."; ++ let cfstr = CFString::from_static_string(original); ++ let converted = cfstr.to_string(); ++ assert!(converted == original); ++} diff --cc vendor/core-foundation-0.4.4/src/url.rs index 000000000,000000000..34c53b754 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-0.4.4/src/url.rs @@@ -1,0 -1,0 +1,120 @@@ ++// Copyright 2013 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! A URL type for Core Foundation. ++ ++pub use core_foundation_sys::url::*; ++ ++use base::{TCFType, CFIndex}; ++use string::{CFString}; ++ ++use core_foundation_sys::base::{kCFAllocatorDefault, CFRelease}; ++use std::fmt; ++use std::ptr; ++use std::path::Path; ++ ++pub struct CFURL(CFURLRef); ++ ++impl Drop for CFURL { ++ fn drop(&mut self) { ++ unsafe { ++ CFRelease(self.as_CFTypeRef()) ++ } ++ } ++} ++ ++impl_TCFType!(CFURL, CFURLRef, CFURLGetTypeID); ++ ++impl fmt::Debug for CFURL { ++ #[inline] ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ unsafe { ++ let string: CFString = TCFType::wrap_under_get_rule(CFURLGetString(self.0)); ++ write!(f, "{}", string.to_string()) ++ } ++ } ++} ++ ++impl CFURL { ++ pub fn from_path>(path: P, isDirectory: bool) -> Option { ++ let path = match path.as_ref().to_str() { ++ Some(path) => path, ++ None => return None, ++ }; ++ ++ unsafe { ++ let url_ref = CFURLCreateFromFileSystemRepresentation(ptr::null_mut(), path.as_ptr(), path.len() as CFIndex, isDirectory as u8); ++ if url_ref.is_null() { ++ return None; ++ } ++ 
Some(TCFType::wrap_under_create_rule(url_ref)) ++ } ++ } ++ ++ pub fn from_file_system_path(filePath: CFString, pathStyle: CFURLPathStyle, isDirectory: bool) -> CFURL { ++ unsafe { ++ let url_ref = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, filePath.as_concrete_TypeRef(), pathStyle, isDirectory as u8); ++ TCFType::wrap_under_create_rule(url_ref) ++ } ++ } ++ ++ pub fn get_string(&self) -> CFString { ++ unsafe { ++ TCFType::wrap_under_get_rule(CFURLGetString(self.0)) ++ } ++ } ++ ++ pub fn get_file_system_path(&self, pathStyle: CFURLPathStyle) -> CFString { ++ unsafe { ++ TCFType::wrap_under_create_rule(CFURLCopyFileSystemPath(self.as_concrete_TypeRef(), pathStyle)) ++ } ++ } ++ ++ pub fn absolute(&self) -> CFURL { ++ unsafe { ++ TCFType::wrap_under_create_rule(CFURLCopyAbsoluteURL(self.as_concrete_TypeRef())) ++ } ++ } ++} ++ ++#[test] ++fn file_url_from_path() { ++ let path = "/usr/local/foo/"; ++ let cfstr_path = CFString::from_static_string(path); ++ let cfurl = CFURL::from_file_system_path(cfstr_path, kCFURLPOSIXPathStyle, true); ++ assert_eq!(cfurl.get_string().to_string(), "file:///usr/local/foo/"); ++} ++ ++#[test] ++fn absolute_file_url() { ++ use core_foundation_sys::url::CFURLCreateWithFileSystemPathRelativeToBase; ++ use std::path::PathBuf; ++ ++ let path = "/usr/local/foo"; ++ let file = "bar"; ++ ++ let cfstr_path = CFString::from_static_string(path); ++ let cfstr_file = CFString::from_static_string(file); ++ let cfurl_base = CFURL::from_file_system_path(cfstr_path, kCFURLPOSIXPathStyle, true); ++ let cfurl_relative: CFURL = unsafe { ++ let url_ref = CFURLCreateWithFileSystemPathRelativeToBase(kCFAllocatorDefault, ++ cfstr_file.as_concrete_TypeRef(), ++ kCFURLPOSIXPathStyle, ++ false as u8, ++ cfurl_base.as_concrete_TypeRef()); ++ TCFType::wrap_under_create_rule(url_ref) ++ }; ++ ++ let mut absolute_path = PathBuf::from(path); ++ absolute_path.push(file); ++ ++ assert_eq!(cfurl_relative.get_file_system_path(kCFURLPOSIXPathStyle).to_string(), 
file); ++ assert_eq!(cfurl_relative.absolute().get_file_system_path(kCFURLPOSIXPathStyle).to_string(), ++ absolute_path.to_str().unwrap()); ++} diff --cc vendor/core-foundation-sys-0.4.4/.cargo-checksum.json index 000000000,000000000..e2dfddf96 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"bc9fb3d6cb663e6fd7cf1c63f9b144ee2b1e4a78595a0451dd34bff85b9a3387"} diff --cc vendor/core-foundation-sys-0.4.4/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/.cargo-ok diff --cc vendor/core-foundation-sys-0.4.4/Cargo.toml index 000000000,000000000..a4d63964c new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/Cargo.toml @@@ -1,0 -1,0 +1,27 @@@ ++# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO ++# ++# When uploading crates to the registry Cargo will automatically ++# "normalize" Cargo.toml files for maximal compatibility ++# with all versions of Cargo and also rewrite `path` dependencies ++# to registry (e.g. crates.io) dependencies ++# ++# If you believe there's an error in this file please file an ++# issue against the rust-lang/cargo repository. 
If you're ++# editing this file be aware that the upstream Cargo.toml ++# will likely look very different (and much more reasonable) ++ ++[package] ++name = "core-foundation-sys" ++version = "0.4.4" ++authors = ["The Servo Project Developers"] ++build = "build.rs" ++description = "Bindings to Core Foundation for OS X" ++homepage = "https://github.com/servo/core-foundation-rs" ++license = "MIT / Apache-2.0" ++repository = "https://github.com/servo/core-foundation-rs" ++[dependencies.libc] ++version = "0.2" ++ ++[features] ++mac_os_10_7_support = [] ++mac_os_10_8_features = [] diff --cc vendor/core-foundation-sys-0.4.4/build.rs index 000000000,000000000..1f03b0602 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/build.rs @@@ -1,0 -1,0 +1,14 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++fn main() { ++ if std::env::var("TARGET").unwrap().contains("-apple") { ++ println!("cargo:rustc-link-lib=framework=CoreFoundation"); ++ } ++} diff --cc vendor/core-foundation-sys-0.4.4/src/array.rs index 000000000,000000000..2574ecae4 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/array.rs @@@ -1,0 -1,0 +1,61 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++use libc::c_void; ++ ++use base::{CFRange, CFIndex, CFAllocatorRef, CFTypeID}; ++ ++/// FIXME(pcwalton): This is wrong. ++pub type CFArrayRetainCallBack = *const u8; ++ ++/// FIXME(pcwalton): This is wrong. 
++pub type CFArrayReleaseCallBack = *const u8; ++ ++/// FIXME(pcwalton): This is wrong. ++pub type CFArrayCopyDescriptionCallBack = *const u8; ++ ++/// FIXME(pcwalton): This is wrong. ++pub type CFArrayEqualCallBack = *const u8; ++ ++#[repr(C)] ++#[derive(Clone, Copy)] ++pub struct CFArrayCallBacks { ++ pub version: CFIndex, ++ pub retain: CFArrayRetainCallBack, ++ pub release: CFArrayReleaseCallBack, ++ pub copyDescription: CFArrayCopyDescriptionCallBack, ++ pub equal: CFArrayEqualCallBack, ++} ++ ++#[repr(C)] ++pub struct __CFArray(c_void); ++ ++pub type CFArrayRef = *const __CFArray; ++ ++extern { ++ /* ++ * CFArray.h ++ */ ++ pub static kCFTypeArrayCallBacks: CFArrayCallBacks; ++ ++ pub fn CFArrayCreate(allocator: CFAllocatorRef, values: *const *const c_void, ++ numValues: CFIndex, callBacks: *const CFArrayCallBacks) -> CFArrayRef; ++ pub fn CFArrayCreateCopy(allocator: CFAllocatorRef , theArray: CFArrayRef) -> CFArrayRef; ++ ++ // CFArrayBSearchValues ++ // CFArrayContainsValue ++ pub fn CFArrayGetCount(theArray: CFArrayRef) -> CFIndex; ++ // CFArrayGetCountOfValue ++ // CFArrayGetFirstIndexOfValue ++ // CFArrayGetLastIndexOfValue ++ pub fn CFArrayGetValues(theArray: CFArrayRef, range: CFRange, values: *mut *const c_void); ++ pub fn CFArrayGetValueAtIndex(theArray: CFArrayRef, idx: CFIndex) -> *const c_void; ++ // CFArrayApplyFunction ++ pub fn CFArrayGetTypeID() -> CFTypeID; ++} diff --cc vendor/core-foundation-sys-0.4.4/src/base.rs index 000000000,000000000..42e294612 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/base.rs @@@ -1,0 -1,0 +1,76 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. 
++ ++use libc::{c_uint, c_long, c_ulong, c_void, c_int}; ++ ++pub type Boolean = u8; ++pub type CFIndex = c_long; ++pub type mach_port_t = c_uint; ++pub type CFAllocatorRef = *const c_void; ++pub type CFNullRef = *const c_void; ++pub type CFHashCode = c_ulong; ++pub type CFTypeID = c_ulong; ++pub type CFTypeRef = *const c_void; ++pub type CFOptionFlags = u32; ++pub type OSStatus = i32; ++pub type SInt32 = c_int; ++ ++#[repr(C)] ++#[derive(Clone, Copy)] ++pub struct CFRange { ++ pub location: CFIndex, ++ pub length: CFIndex ++} ++ ++// for back-compat ++impl CFRange { ++ pub fn init(location: CFIndex, length: CFIndex) -> CFRange { ++ CFRange { ++ location: location, ++ length: length, ++ } ++ } ++} ++ ++extern { ++ /* ++ * CFBase.h ++ */ ++ ++ /* CFAllocator Reference */ ++ // N.B. Many CFAllocator functions and constants are omitted here. ++ pub static kCFAllocatorDefault: CFAllocatorRef; ++ pub static kCFAllocatorSystemDefault: CFAllocatorRef; ++ pub static kCFAllocatorMalloc: CFAllocatorRef; ++ pub static kCFAllocatorMallocZone: CFAllocatorRef; ++ pub static kCFAllocatorNull: CFAllocatorRef; ++ pub static kCFAllocatorUseContext: CFAllocatorRef; ++ ++ /* CFNull Reference */ ++ ++ pub static kCFNull: CFNullRef; ++ ++ /* CFType Reference */ ++ ++ //fn CFCopyDescription ++ //fn CFCopyTypeIDDescription ++ //fn CFEqual ++ //fn CFGetAllocator ++ pub fn CFEqual(cf1: CFTypeRef, cf2: CFTypeRef) -> Boolean; ++ pub fn CFGetRetainCount(cf: CFTypeRef) -> CFIndex; ++ pub fn CFGetTypeID(cf: CFTypeRef) -> CFTypeID; ++ pub fn CFHash(cf: CFTypeRef) -> CFHashCode; ++ //fn CFMakeCollectable ++ pub fn CFRelease(cf: CFTypeRef); ++ pub fn CFRetain(cf: CFTypeRef) -> CFTypeRef; ++ pub fn CFShow(obj: CFTypeRef); ++ ++ /* Base Utilities Reference */ ++ // N.B. Some things missing here. 
++} diff --cc vendor/core-foundation-sys-0.4.4/src/bundle.rs index 000000000,000000000..3f8c0b376 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/bundle.rs @@@ -1,0 -1,0 +1,36 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++use libc::c_void; ++ ++use base::{CFTypeID, CFAllocatorRef}; ++use url::CFURLRef; ++use dictionary::CFDictionaryRef; ++use string::CFStringRef; ++ ++#[repr(C)] ++pub struct __CFBundle(c_void); ++ ++pub type CFBundleRef = *const __CFBundle; ++ ++extern { ++ /* ++ * CFBundle.h ++ */ ++ pub fn CFBundleCreate(allocator: CFAllocatorRef, bundleURL: CFURLRef) -> CFBundleRef; ++ ++ pub fn CFBundleGetBundleWithIdentifier(bundleID: CFStringRef) -> CFBundleRef; ++ pub fn CFBundleGetFunctionPointerForName(bundle: CFBundleRef, function_name: CFStringRef) -> *const c_void; ++ pub fn CFBundleGetMainBundle() -> CFBundleRef; ++ pub fn CFBundleGetInfoDictionary(bundle: CFBundleRef) -> CFDictionaryRef; ++ ++ pub fn CFBundleGetTypeID() -> CFTypeID; ++ pub fn CFBundleCopyExecutableURL(bundle: CFBundleRef) -> CFURLRef; ++ pub fn CFBundleCopyPrivateFrameworksURL(bundle: CFBundleRef) -> CFURLRef; ++} diff --cc vendor/core-foundation-sys-0.4.4/src/data.rs index 000000000,000000000..6a42b2b38 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/data.rs @@@ -1,0 -1,0 +1,22 @@@ ++use libc::c_void; ++ ++use base::{CFAllocatorRef, CFTypeID, CFIndex}; ++ ++#[repr(C)] ++pub struct __CFData(c_void); ++ ++pub type CFDataRef = *const __CFData; ++ ++extern { ++ /* ++ * CFData.h ++ */ ++ ++ pub fn CFDataCreate(allocator: CFAllocatorRef, ++ bytes: *const u8, length: CFIndex) -> CFDataRef; ++ //fn CFDataFind ++ pub fn CFDataGetBytePtr(theData: 
CFDataRef) -> *const u8; ++ pub fn CFDataGetLength(theData: CFDataRef) -> CFIndex; ++ ++ pub fn CFDataGetTypeID() -> CFTypeID; ++} diff --cc vendor/core-foundation-sys-0.4.4/src/date.rs index 000000000,000000000..c6cac2d95 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/date.rs @@@ -1,0 -1,0 +1,15 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++pub type CFTimeInterval = f64; ++pub type CFAbsoluteTime = CFTimeInterval; ++ ++extern { ++ pub fn CFAbsoluteTimeGetCurrent() -> CFAbsoluteTime; ++} diff --cc vendor/core-foundation-sys-0.4.4/src/dictionary.rs index 000000000,000000000..bf51bb101 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/dictionary.rs @@@ -1,0 -1,0 +1,79 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. 
++ ++use libc::{c_void}; ++ ++use base::{CFAllocatorRef, CFIndex, CFTypeID, Boolean}; ++ ++pub type CFDictionaryApplierFunction = extern "C" fn (key: *const c_void, ++ value: *const c_void, ++ context: *mut c_void); ++pub type CFDictionaryCopyDescriptionCallBack = *const u8; ++pub type CFDictionaryEqualCallBack = *const u8; ++pub type CFDictionaryHashCallBack = *const u8; ++pub type CFDictionaryReleaseCallBack = *const u8; ++pub type CFDictionaryRetainCallBack = *const u8; ++ ++#[allow(dead_code)] ++#[repr(C)] ++#[derive(Clone, Copy)] ++pub struct CFDictionaryKeyCallBacks { ++ pub version: CFIndex, ++ pub retain: CFDictionaryRetainCallBack, ++ pub release: CFDictionaryReleaseCallBack, ++ pub copyDescription: CFDictionaryCopyDescriptionCallBack, ++ pub equal: CFDictionaryEqualCallBack, ++ pub hash: CFDictionaryHashCallBack ++} ++ ++#[allow(dead_code)] ++#[repr(C)] ++#[derive(Clone, Copy)] ++pub struct CFDictionaryValueCallBacks { ++ pub version: CFIndex, ++ pub retain: CFDictionaryRetainCallBack, ++ pub release: CFDictionaryReleaseCallBack, ++ pub copyDescription: CFDictionaryCopyDescriptionCallBack, ++ pub equal: CFDictionaryEqualCallBack ++} ++ ++#[repr(C)] ++pub struct __CFDictionary(c_void); ++ ++pub type CFDictionaryRef = *const __CFDictionary; ++pub type CFMutableDictionaryRef = *const __CFDictionary; ++ ++extern { ++ /* ++ * CFDictionary.h ++ */ ++ ++ pub static kCFTypeDictionaryKeyCallBacks: CFDictionaryKeyCallBacks; ++ pub static kCFTypeDictionaryValueCallBacks: CFDictionaryValueCallBacks; ++ ++ pub fn CFDictionaryContainsKey(theDict: CFDictionaryRef, key: *const c_void) -> Boolean; ++ pub fn CFDictionaryCreate(allocator: CFAllocatorRef, keys: *const *const c_void, values: *const *const c_void, ++ numValues: CFIndex, keyCallBacks: *const CFDictionaryKeyCallBacks, ++ valueCallBacks: *const CFDictionaryValueCallBacks) ++ -> CFDictionaryRef; ++ pub fn CFDictionaryGetCount(theDict: CFDictionaryRef) -> CFIndex; ++ pub fn CFDictionaryGetTypeID() -> CFTypeID; ++ 
pub fn CFDictionaryGetValueIfPresent(theDict: CFDictionaryRef, key: *const c_void, value: *mut *const c_void) ++ -> Boolean; ++ pub fn CFDictionaryApplyFunction(theDict: CFDictionaryRef, ++ applier: CFDictionaryApplierFunction, ++ context: *mut c_void); ++ pub fn CFDictionarySetValue(theDict: CFMutableDictionaryRef, ++ key: *const c_void, ++ value: *const c_void); ++ pub fn CFDictionaryGetKeysAndValues(theDict: CFDictionaryRef, ++ keys: *mut *const c_void, ++ values: *mut *const c_void); ++ ++} diff --cc vendor/core-foundation-sys-0.4.4/src/error.rs index 000000000,000000000..68097dad7 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/error.rs @@@ -1,0 -1,0 +1,32 @@@ ++// Copyright 2016 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++use libc::c_void; ++ ++use base::{CFTypeID, CFIndex}; ++use string::CFStringRef; ++ ++#[repr(C)] ++pub struct __CFError(c_void); ++ ++pub type CFErrorRef = *mut __CFError; ++ ++extern "C" { ++ pub fn CFErrorGetTypeID() -> CFTypeID; ++ ++ pub static kCFErrorDomainPOSIX: CFStringRef; ++ pub static kCFErrorDomainOSStatus: CFStringRef; ++ pub static kCFErrorDomainMach: CFStringRef; ++ pub static kCFErrorDomainCocoa: CFStringRef; ++ ++ pub fn CFErrorGetDomain(err: CFErrorRef) -> CFStringRef; ++ pub fn CFErrorGetCode(err: CFErrorRef) -> CFIndex; ++ ++ pub fn CFErrorCopyDescription(err: CFErrorRef) -> CFStringRef; ++} diff --cc vendor/core-foundation-sys-0.4.4/src/lib.rs index 000000000,000000000..96c0b1e69 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/lib.rs @@@ -1,0 -1,0 +1,28 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. 
++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, improper_ctypes)] ++ ++#![cfg_attr(all(feature="mac_os_10_7_support", feature="mac_os_10_8_features"), feature(linkage))] // back-compat requires weak linkage ++ ++extern crate libc; ++ ++pub mod array; ++pub mod base; ++pub mod bundle; ++pub mod data; ++pub mod date; ++pub mod dictionary; ++pub mod error; ++pub mod messageport; ++pub mod number; ++pub mod propertylist; ++pub mod runloop; ++pub mod set; ++pub mod string; ++pub mod url; diff --cc vendor/core-foundation-sys-0.4.4/src/messageport.rs index 000000000,000000000..9b15a4d06 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/messageport.rs @@@ -1,0 -1,0 +1,79 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. 
++ ++use libc::c_void; ++ ++use base::{CFAllocatorRef, CFIndex, CFTypeID, Boolean}; ++use data::CFDataRef; ++use date::CFTimeInterval; ++use runloop::CFRunLoopSourceRef; ++use string::CFStringRef; ++ ++#[repr(C)] ++#[derive(Copy, Clone)] ++#[derive(Debug)] ++pub struct CFMessagePortContext { ++ pub version: CFIndex, ++ pub info: *mut c_void, ++ pub retain: Option *const c_void>, ++ pub release: Option, ++ pub copyDescription: Option CFStringRef>, ++} ++ ++pub type CFMessagePortCallBack = Option< ++ unsafe extern fn(local: CFMessagePortRef, ++ msgid: i32, ++ data: CFDataRef, ++ info: *mut c_void) -> CFDataRef>; ++ ++pub type CFMessagePortInvalidationCallBack = Option< ++ unsafe extern "C" fn(ms: CFMessagePortRef, info: *mut c_void)>; ++ ++#[repr(C)] ++pub struct __CFMessagePort(c_void); ++pub type CFMessagePortRef = *const __CFMessagePort; ++ ++extern { ++ /* ++ * CFMessagePort.h ++ */ ++ pub fn CFMessagePortGetTypeID() -> CFTypeID; ++ pub fn CFMessagePortCreateLocal(allocator: CFAllocatorRef, ++ name: CFStringRef, ++ callout: CFMessagePortCallBack, ++ context: *const CFMessagePortContext, ++ shouldFreeInfo: *mut Boolean) ++ -> CFMessagePortRef; ++ pub fn CFMessagePortCreateRemote(allocator: CFAllocatorRef, ++ name: CFStringRef) -> CFMessagePortRef; ++ pub fn CFMessagePortIsRemote(ms: CFMessagePortRef) -> Boolean; ++ pub fn CFMessagePortGetName(ms: CFMessagePortRef) -> CFStringRef; ++ pub fn CFMessagePortSetName(ms: CFMessagePortRef, newName: CFStringRef) ++ -> Boolean; ++ pub fn CFMessagePortGetContext(ms: CFMessagePortRef, ++ context: *mut CFMessagePortContext); ++ pub fn CFMessagePortInvalidate(ms: CFMessagePortRef); ++ pub fn CFMessagePortIsValid(ms: CFMessagePortRef) -> Boolean; ++ pub fn CFMessagePortGetInvalidationCallBack(ms: CFMessagePortRef) ++ -> CFMessagePortInvalidationCallBack; ++ pub fn CFMessagePortSetInvalidationCallBack(ms: CFMessagePortRef, ++ callout: CFMessagePortInvalidationCallBack); ++ pub fn CFMessagePortSendRequest(remote: CFMessagePortRef, 
msgid: i32, ++ data: CFDataRef, ++ sendTimeout: CFTimeInterval, ++ rcvTimeout: CFTimeInterval, ++ replyMode: CFStringRef, ++ returnData: *mut CFDataRef) -> i32; ++ pub fn CFMessagePortCreateRunLoopSource(allocator: CFAllocatorRef, ++ local: CFMessagePortRef, ++ order: CFIndex) ++ -> CFRunLoopSourceRef; ++ // CFMessagePortSetDispatchQueue ++} diff --cc vendor/core-foundation-sys-0.4.4/src/number.rs index 000000000,000000000..3a5c09f42 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/number.rs @@@ -1,0 -1,0 +1,60 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++use libc::c_void; ++ ++use base::{CFAllocatorRef, CFTypeID}; ++ ++#[repr(C)] ++pub struct __CFBoolean(c_void); ++ ++pub type CFBooleanRef = *const __CFBoolean; ++ ++pub type CFNumberType = u32; ++ ++// members of enum CFNumberType ++// static kCFNumberSInt8Type: CFNumberType = 1; ++// static kCFNumberSInt16Type: CFNumberType = 2; ++pub static kCFNumberSInt32Type: CFNumberType = 3; ++pub static kCFNumberSInt64Type: CFNumberType = 4; ++// static kCFNumberFloat32Type: CFNumberType = 5; ++pub static kCFNumberFloat64Type: CFNumberType = 6; ++// static kCFNumberCharType: CFNumberType = 7; ++// static kCFNumberShortType: CFNumberType = 8; ++// static kCFNumberIntType: CFNumberType = 9; ++// static kCFNumberLongType: CFNumberType = 10; ++// static kCFNumberLongLongType: CFNumberType = 11; ++// static kCFNumberFloatType: CFNumberType = 12; ++// static kCFNumberDoubleType: CFNumberType = 13; ++// static kCFNumberCFIndexType: CFNumberType = 14; ++// static kCFNumberNSIntegerType: CFNumberType = 15; ++// static kCFNumberCGFloatType: CFNumberType = 16; ++// static kCFNumberMaxType: CFNumberType = 16; ++ ++// 
This is an enum due to zero-sized types warnings. ++// For more details see https://github.com/rust-lang/rust/issues/27303 ++pub enum __CFNumber {} ++ ++pub type CFNumberRef = *const __CFNumber; ++ ++extern { ++ /* ++ * CFNumber.h ++ */ ++ pub static kCFBooleanTrue: CFBooleanRef; ++ pub static kCFBooleanFalse: CFBooleanRef; ++ ++ pub fn CFBooleanGetTypeID() -> CFTypeID; ++ pub fn CFNumberCreate(allocator: CFAllocatorRef, theType: CFNumberType, valuePtr: *const c_void) ++ -> CFNumberRef; ++ //fn CFNumberGetByteSize ++ pub fn CFNumberGetValue(number: CFNumberRef, theType: CFNumberType, valuePtr: *mut c_void) -> bool; ++ //fn CFNumberCompare ++ pub fn CFNumberGetTypeID() -> CFTypeID; ++} diff --cc vendor/core-foundation-sys-0.4.4/src/propertylist.rs index 000000000,000000000..2396ef998 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/propertylist.rs @@@ -1,0 -1,0 +1,37 @@@ ++use base::{CFAllocatorRef, CFIndex, CFOptionFlags, CFTypeRef}; ++use data::CFDataRef; ++use error::CFErrorRef; ++ ++pub type CFPropertyListRef = CFTypeRef; ++ ++pub type CFPropertyListFormat = CFIndex; ++pub const kCFPropertyListOpenStepFormat: CFPropertyListFormat = 1; ++pub const kCFPropertyListXMLFormat_v1_0: CFPropertyListFormat = 100; ++pub const kCFPropertyListBinaryFormat_v1_0: CFPropertyListFormat = 200; ++ ++pub type CFPropertyListMutabilityOptions = CFOptionFlags; ++pub const kCFPropertyListImmutable: CFPropertyListMutabilityOptions = 0; ++pub const kCFPropertyListMutableContainers: CFPropertyListMutabilityOptions = 1; ++pub const kCFPropertyListMutableContainersAndLeaves: CFPropertyListMutabilityOptions = 2; ++ ++extern "C" { ++ // CFPropertyList.h ++ // ++ ++ // fn CFPropertyListCreateDeepCopy ++ // fn CFPropertyListIsValid ++ pub fn CFPropertyListCreateWithData(allocator: CFAllocatorRef, ++ data: CFDataRef, ++ options: CFPropertyListMutabilityOptions, ++ format: *mut CFPropertyListFormat, ++ error: *mut CFErrorRef) ++ -> CFPropertyListRef; ++ // fn 
CFPropertyListCreateWithStream ++ // fn CFPropertyListWrite ++ pub fn CFPropertyListCreateData(allocator: CFAllocatorRef, ++ propertyList: CFPropertyListRef, ++ format: CFPropertyListFormat, ++ options: CFOptionFlags, ++ error: *mut CFErrorRef) ++ -> CFDataRef; ++} diff --cc vendor/core-foundation-sys-0.4.4/src/runloop.rs index 000000000,000000000..86414a5ee new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/runloop.rs @@@ -1,0 -1,0 +1,164 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++use libc::c_void; ++ ++use array::CFArrayRef; ++use base::{Boolean, CFIndex, CFTypeID, CFAllocatorRef, CFOptionFlags, CFHashCode, mach_port_t}; ++use date::{CFAbsoluteTime, CFTimeInterval}; ++use string::CFStringRef; ++ ++#[repr(C)] ++pub struct __CFRunLoop(c_void); ++ ++pub type CFRunLoopRef = *const __CFRunLoop; ++ ++#[repr(C)] ++pub struct __CFRunLoopSource(c_void); ++ ++pub type CFRunLoopSourceRef = *const __CFRunLoopSource; ++ ++#[repr(C)] ++pub struct __CFRunLoopObserver(c_void); ++ ++pub type CFRunLoopObserverRef = *const __CFRunLoopObserver; ++ ++// Reasons for CFRunLoopRunInMode() to Return ++pub const kCFRunLoopRunFinished: i32 = 1; ++pub const kCFRunLoopRunStopped: i32 = 2; ++pub const kCFRunLoopRunTimedOut: i32 = 3; ++pub const kCFRunLoopRunHandledSource: i32 = 4; ++ ++// Run Loop Observer Activities ++//typedef CF_OPTIONS(CFOptionFlags, CFRunLoopActivity) { ++pub type CFRunLoopActivity = CFOptionFlags; ++pub const kCFRunLoopEntry: CFOptionFlags = 1 << 0; ++pub const kCFRunLoopBeforeTimers: CFOptionFlags = 1 << 1; ++pub const kCFRunLoopBeforeSources: CFOptionFlags = 1 << 2; ++pub const kCFRunLoopBeforeWaiting: CFOptionFlags = 1 << 5; ++pub const 
kCFRunLoopAfterWaiting: CFOptionFlags = 1 << 6; ++pub const kCFRunLoopExit: CFOptionFlags = 1 << 7; ++pub const kCFRunLoopAllActivities: CFOptionFlags = 0x0FFFFFFF; ++ ++#[repr(C)] ++pub struct CFRunLoopSourceContext { ++ pub version: CFIndex, ++ pub info: *mut c_void, ++ pub retain: extern "C" fn (info: *const c_void) -> *const c_void, ++ pub release: extern "C" fn (info: *const c_void), ++ pub copyDescription: extern "C" fn (info: *const c_void) -> CFStringRef, ++ pub equal: extern "C" fn (info1: *const c_void, info2: *const c_void) -> Boolean, ++ pub hash: extern "C" fn (info: *const c_void) -> CFHashCode, ++ pub schedule: extern "C" fn (info: *const c_void, rl: CFRunLoopRef, mode: CFStringRef), ++ pub cancel: extern "C" fn (info: *const c_void, rl: CFRunLoopRef, mode: CFStringRef), ++ pub perform: extern "C" fn (info: *const c_void), ++} ++ ++#[repr(C)] ++pub struct CFRunLoopSourceContext1 { ++ pub version: CFIndex, ++ pub info: *mut c_void, ++ pub retain: extern "C" fn (info: *const c_void) -> *const c_void, ++ pub release: extern "C" fn (info: *const c_void), ++ pub copyDescription: extern "C" fn (info: *const c_void) -> CFStringRef, ++ pub equal: extern "C" fn (info1: *const c_void, info2: *const c_void) -> Boolean, ++ pub hash: extern "C" fn (info: *const c_void) -> CFHashCode, ++ // note that the following two fields are platform dependent in the C header, the ones here are for OS X ++ pub getPort: extern "C" fn (info: *mut c_void) -> mach_port_t, ++ pub perform: extern "C" fn (msg: *mut c_void, size: CFIndex, allocator: CFAllocatorRef, info: *mut c_void) -> *mut c_void, ++} ++ ++#[repr(C)] ++pub struct CFRunLoopObserverContext { ++ pub version: CFIndex, ++ pub info: *mut c_void, ++ pub retain: extern "C" fn (info: *const c_void) -> *const c_void, ++ pub release: extern "C" fn (info: *const c_void), ++ pub copyDescription: extern "C" fn (info: *const c_void) -> CFStringRef, ++} ++ ++pub type CFRunLoopObserverCallBack = extern "C" fn (observer: 
CFRunLoopObserverRef, activity: CFRunLoopActivity, info: *mut c_void); ++ ++#[repr(C)] ++pub struct CFRunLoopTimerContext { ++ pub version: CFIndex, ++ pub info: *mut c_void, ++ pub retain: extern "C" fn (info: *const c_void) -> *const c_void, ++ pub release: extern "C" fn (info: *const c_void), ++ pub copyDescription: extern "C" fn (info: *const c_void) -> CFStringRef, ++} ++ ++pub type CFRunLoopTimerCallBack = extern "C" fn (timer: CFRunLoopTimerRef, info: *mut c_void); ++ ++#[repr(C)] ++pub struct __CFRunLoopTimer; ++ ++pub type CFRunLoopTimerRef = *const __CFRunLoopTimer; ++ ++extern { ++ /* ++ * CFRunLoop.h ++ */ ++ pub static kCFRunLoopDefaultMode: CFStringRef; ++ pub static kCFRunLoopCommonModes: CFStringRef; ++ pub fn CFRunLoopGetTypeID() -> CFTypeID; ++ pub fn CFRunLoopGetCurrent() -> CFRunLoopRef; ++ pub fn CFRunLoopGetMain() -> CFRunLoopRef; ++ pub fn CFRunLoopCopyCurrentMode(rl: CFRunLoopRef) -> CFStringRef; ++ pub fn CFRunLoopCopyAllModes(rl: CFRunLoopRef) -> CFArrayRef; ++ pub fn CFRunLoopAddCommonMode(rl: CFRunLoopRef, mode: CFStringRef); ++ pub fn CFRunLoopGetNextTimerFireDate(rl: CFRunLoopRef, mode: CFStringRef) -> CFAbsoluteTime; ++ pub fn CFRunLoopRun(); ++ pub fn CFRunLoopRunInMode(mode: CFStringRef, seconds: CFTimeInterval, returnAfterSourceHandled: Boolean) -> i32; ++ pub fn CFRunLoopIsWaiting(rl: CFRunLoopRef) -> Boolean; ++ pub fn CFRunLoopWakeUp(rl: CFRunLoopRef); ++ pub fn CFRunLoopStop(rl: CFRunLoopRef); ++ // fn CFRunLoopPerformBlock(rl: CFRunLoopRef, mode: CFTypeRef, block: void (^)(void)); ++ pub fn CFRunLoopContainsSource(rl: CFRunLoopRef, source: CFRunLoopSourceRef, mode: CFStringRef) -> Boolean; ++ pub fn CFRunLoopAddSource(rl: CFRunLoopRef, source: CFRunLoopSourceRef, mode: CFStringRef); ++ pub fn CFRunLoopRemoveSource(rl: CFRunLoopRef, source: CFRunLoopSourceRef, mode: CFStringRef); ++ pub fn CFRunLoopContainsObserver(rl: CFRunLoopRef, observer: CFRunLoopObserverRef, mode: CFStringRef) -> Boolean; ++ pub fn 
CFRunLoopAddObserver(rl: CFRunLoopRef, observer: CFRunLoopObserverRef, mode: CFStringRef); ++ pub fn CFRunLoopRemoveObserver(rl: CFRunLoopRef, observer: CFRunLoopObserverRef, mode: CFStringRef); ++ pub fn CFRunLoopContainsTimer(rl: CFRunLoopRef, timer: CFRunLoopTimerRef, mode: CFStringRef) -> Boolean; ++ pub fn CFRunLoopAddTimer(rl: CFRunLoopRef, timer: CFRunLoopTimerRef, mode: CFStringRef); ++ pub fn CFRunLoopRemoveTimer(rl: CFRunLoopRef, timer: CFRunLoopTimerRef, mode: CFStringRef); ++ ++ pub fn CFRunLoopSourceGetTypeID() -> CFTypeID; ++ pub fn CFRunLoopSourceCreate(allocator: CFAllocatorRef, order: CFIndex, context: *mut CFRunLoopSourceContext) -> CFRunLoopSourceRef; ++ pub fn CFRunLoopSourceGetOrder(source: CFRunLoopSourceRef) -> CFIndex; ++ pub fn CFRunLoopSourceInvalidate(source: CFRunLoopSourceRef); ++ pub fn CFRunLoopSourceIsValid(source: CFRunLoopSourceRef) -> Boolean; ++ pub fn CFRunLoopSourceGetContext(source: CFRunLoopSourceRef, context: *mut CFRunLoopSourceContext); ++ pub fn CFRunLoopSourceSignal(source: CFRunLoopSourceRef); ++ ++ pub fn CFRunLoopObserverGetTypeID() -> CFTypeID; ++ pub fn CFRunLoopObserverCreate(allocator: CFAllocatorRef, activities: CFOptionFlags, repeats: Boolean, order: CFIndex, callout: CFRunLoopObserverCallBack, context: *mut CFRunLoopObserverContext) -> CFRunLoopObserverRef; ++ // fn CFRunLoopObserverCreateWithHandler(allocator: CFAllocatorRef, activities: CFOptionFlags, repeats: Boolean, order: CFIndex, block: void (^) (CFRunLoopObserverRef observer, CFRunLoopActivity activity)) -> CFRunLoopObserverRef; ++ pub fn CFRunLoopObserverGetActivities(observer: CFRunLoopObserverRef) -> CFOptionFlags; ++ pub fn CFRunLoopObserverDoesRepeat(observer: CFRunLoopObserverRef) -> Boolean; ++ pub fn CFRunLoopObserverGetOrder(observer: CFRunLoopObserverRef) -> CFIndex; ++ pub fn CFRunLoopObserverInvalidate(observer: CFRunLoopObserverRef); ++ pub fn CFRunLoopObserverIsValid(observer: CFRunLoopObserverRef) -> Boolean; ++ pub fn 
CFRunLoopObserverGetContext(observer: CFRunLoopObserverRef, context: *mut CFRunLoopObserverContext); ++ ++ pub fn CFRunLoopTimerGetTypeID() -> CFTypeID; ++ pub fn CFRunLoopTimerCreate(allocator: CFAllocatorRef, fireDate: CFAbsoluteTime, interval: CFTimeInterval, flags: CFOptionFlags, order: CFIndex, callout: CFRunLoopTimerCallBack, context: *mut CFRunLoopTimerContext) -> CFRunLoopTimerRef; ++ // fn CFRunLoopTimerCreateWithHandler(allocator: CFAllocatorRef, fireDate: CFAbsoluteTime, interval: CFTimeInterval, flags: CFOptionFlags, order: CFIndex, block: void (^) (CFRunLoopTimerRef timer)) -> CFRunLoopTimerRef; ++ pub fn CFRunLoopTimerGetNextFireDate(timer: CFRunLoopTimerRef) -> CFAbsoluteTime; ++ pub fn CFRunLoopTimerSetNextFireDate(timer: CFRunLoopTimerRef, fireDate: CFAbsoluteTime); ++ pub fn CFRunLoopTimerGetInterval(timer: CFRunLoopTimerRef) -> CFTimeInterval; ++ pub fn CFRunLoopTimerDoesRepeat(timer: CFRunLoopTimerRef) -> Boolean; ++ pub fn CFRunLoopTimerGetOrder(timer: CFRunLoopTimerRef) -> CFIndex; ++ pub fn CFRunLoopTimerInvalidate(timer: CFRunLoopTimerRef); ++ pub fn CFRunLoopTimerIsValid(timer: CFRunLoopTimerRef) -> Boolean; ++ pub fn CFRunLoopTimerGetContext(timer: CFRunLoopTimerRef, context: *mut CFRunLoopTimerContext); ++ pub fn CFRunLoopTimerGetTolerance(timer: CFRunLoopTimerRef) -> CFTimeInterval; ++ pub fn CFRunLoopTimerSetTolerance(timer: CFRunLoopTimerRef, tolerance: CFTimeInterval); ++} diff --cc vendor/core-foundation-sys-0.4.4/src/set.rs index 000000000,000000000..ca9c3c59a new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/set.rs @@@ -1,0 -1,0 +1,58 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. 
++ ++use libc::c_void; ++ ++use base::{CFAllocatorRef, CFIndex, CFTypeID}; ++ ++pub type CFSetApplierFunction = extern "C" fn (value: *const c_void, ++ context: *const c_void); ++pub type CFSetRetainCallBack = *const u8; ++pub type CFSetReleaseCallBack = *const u8; ++pub type CFSetCopyDescriptionCallBack = *const u8; ++pub type CFSetEqualCallBack = *const u8; ++pub type CFSetHashCallBack = *const u8; ++ ++#[repr(C)] ++#[derive(Clone, Copy)] ++pub struct CFSetCallBacks { ++ pub version: CFIndex, ++ pub retain: CFSetRetainCallBack, ++ pub release: CFSetReleaseCallBack, ++ pub copyDescription: CFSetCopyDescriptionCallBack, ++ pub equal: CFSetEqualCallBack, ++ pub hash: CFSetHashCallBack, ++} ++ ++#[repr(C)] ++pub struct __CFSet(c_void); ++ ++pub type CFSetRef = *const __CFSet; ++ ++extern { ++ /* ++ * CFSet.h ++ */ ++ ++ pub static kCFTypeSetCallBacks: CFSetCallBacks; ++ ++ /* Creating Sets */ ++ pub fn CFSetCreate(allocator: CFAllocatorRef, values: *const *const c_void, numValues: CFIndex, ++ callBacks: *const CFSetCallBacks) -> CFSetRef; ++ ++ /* Applying a Function to Set Members */ ++ pub fn CFSetApplyFunction(theSet: CFSetRef, ++ applier: CFSetApplierFunction, ++ context: *const c_void); ++ ++ pub fn CFSetGetCount(theSet: CFSetRef) -> CFIndex; ++ ++ pub fn CFSetGetTypeID() -> CFTypeID; ++} ++ diff --cc vendor/core-foundation-sys-0.4.4/src/string.rs index 000000000,000000000..6095bca87 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/string.rs @@@ -1,0 -1,0 +1,320 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. 
++ ++use libc::{c_char, c_ushort, c_void}; ++ ++use base::{Boolean, CFOptionFlags, CFIndex, CFAllocatorRef, CFRange, CFTypeID}; ++ ++pub type UniChar = c_ushort; ++ ++// CFString.h ++ ++pub type CFStringCompareFlags = CFOptionFlags; ++//static kCFCompareCaseInsensitive: CFStringCompareFlags = 1; ++//static kCFCompareBackwards: CFStringCompareFlags = 4; ++//static kCFCompareAnchored: CFStringCompareFlags = 8; ++//static kCFCompareNonliteral: CFStringCompareFlags = 16; ++//static kCFCompareLocalized: CFStringCompareFlags = 32; ++//static kCFCompareNumerically: CFStringCompareFlags = 64; ++//static kCFCompareDiacriticInsensitive: CFStringCompareFlags = 128; ++//static kCFCompareWidthInsensitive: CFStringCompareFlags = 256; ++//static kCFCompareForcedOrdering: CFStringCompareFlags = 512; ++ ++pub type CFStringEncoding = u32; ++ ++// OS X built-in encodings. ++ ++//static kCFStringEncodingMacRoman: CFStringEncoding = 0; ++//static kCFStringEncodingWindowsLatin1: CFStringEncoding = 0x0500; ++//static kCFStringEncodingISOLatin1: CFStringEncoding = 0x0201; ++//static kCFStringEncodingNextStepLatin: CFStringEncoding = 0x0B01; ++//static kCFStringEncodingASCII: CFStringEncoding = 0x0600; ++//static kCFStringEncodingUnicode: CFStringEncoding = 0x0100; ++pub static kCFStringEncodingUTF8: CFStringEncoding = 0x08000100; ++//static kCFStringEncodingNonLossyASCII: CFStringEncoding = 0x0BFF; ++ ++//static kCFStringEncodingUTF16: CFStringEncoding = 0x0100; ++//static kCFStringEncodingUTF16BE: CFStringEncoding = 0x10000100; ++//static kCFStringEncodingUTF16LE: CFStringEncoding = 0x14000100; ++//static kCFStringEncodingUTF32: CFStringEncoding = 0x0c000100; ++//static kCFStringEncodingUTF32BE: CFStringEncoding = 0x18000100; ++//static kCFStringEncodingUTF32LE: CFStringEncoding = 0x1c000100; ++ ++ ++// CFStringEncodingExt.h ++ ++pub type CFStringEncodings = CFIndex; ++ ++// External encodings, except those defined above. 
++// Defined above: kCFStringEncodingMacRoman = 0 ++//static kCFStringEncodingMacJapanese: CFStringEncoding = 1; ++//static kCFStringEncodingMacChineseTrad: CFStringEncoding = 2; ++//static kCFStringEncodingMacKorean: CFStringEncoding = 3; ++//static kCFStringEncodingMacArabic: CFStringEncoding = 4; ++//static kCFStringEncodingMacHebrew: CFStringEncoding = 5; ++//static kCFStringEncodingMacGreek: CFStringEncoding = 6; ++//static kCFStringEncodingMacCyrillic: CFStringEncoding = 7; ++//static kCFStringEncodingMacDevanagari: CFStringEncoding = 9; ++//static kCFStringEncodingMacGurmukhi: CFStringEncoding = 10; ++//static kCFStringEncodingMacGujarati: CFStringEncoding = 11; ++//static kCFStringEncodingMacOriya: CFStringEncoding = 12; ++//static kCFStringEncodingMacBengali: CFStringEncoding = 13; ++//static kCFStringEncodingMacTamil: CFStringEncoding = 14; ++//static kCFStringEncodingMacTelugu: CFStringEncoding = 15; ++//static kCFStringEncodingMacKannada: CFStringEncoding = 16; ++//static kCFStringEncodingMacMalayalam: CFStringEncoding = 17; ++//static kCFStringEncodingMacSinhalese: CFStringEncoding = 18; ++//static kCFStringEncodingMacBurmese: CFStringEncoding = 19; ++//static kCFStringEncodingMacKhmer: CFStringEncoding = 20; ++//static kCFStringEncodingMacThai: CFStringEncoding = 21; ++//static kCFStringEncodingMacLaotian: CFStringEncoding = 22; ++//static kCFStringEncodingMacGeorgian: CFStringEncoding = 23; ++//static kCFStringEncodingMacArmenian: CFStringEncoding = 24; ++//static kCFStringEncodingMacChineseSimp: CFStringEncoding = 25; ++//static kCFStringEncodingMacTibetan: CFStringEncoding = 26; ++//static kCFStringEncodingMacMongolian: CFStringEncoding = 27; ++//static kCFStringEncodingMacEthiopic: CFStringEncoding = 28; ++//static kCFStringEncodingMacCentralEurRoman: CFStringEncoding = 29; ++//static kCFStringEncodingMacVietnamese: CFStringEncoding = 30; ++//static kCFStringEncodingMacExtArabic: CFStringEncoding = 31; ++//static kCFStringEncodingMacSymbol: 
CFStringEncoding = 33; ++//static kCFStringEncodingMacDingbats: CFStringEncoding = 34; ++//static kCFStringEncodingMacTurkish: CFStringEncoding = 35; ++//static kCFStringEncodingMacCroatian: CFStringEncoding = 36; ++//static kCFStringEncodingMacIcelandic: CFStringEncoding = 37; ++//static kCFStringEncodingMacRomanian: CFStringEncoding = 38; ++//static kCFStringEncodingMacCeltic: CFStringEncoding = 39; ++//static kCFStringEncodingMacGaelic: CFStringEncoding = 40; ++//static kCFStringEncodingMacFarsi: CFStringEncoding = 0x8C; ++//static kCFStringEncodingMacUkrainian: CFStringEncoding = 0x98; ++//static kCFStringEncodingMacInuit: CFStringEncoding = 0xEC; ++//static kCFStringEncodingMacVT100: CFStringEncoding = 0xFC; ++//static kCFStringEncodingMacHFS: CFStringEncoding = 0xFF; ++// Defined above: kCFStringEncodingISOLatin1 = 0x0201 ++//static kCFStringEncodingISOLatin2: CFStringEncoding = 0x0202; ++//static kCFStringEncodingISOLatin3: CFStringEncoding = 0x0203; ++//static kCFStringEncodingISOLatin4: CFStringEncoding = 0x0204; ++//static kCFStringEncodingISOLatinCyrillic: CFStringEncoding = 0x0205; ++//static kCFStringEncodingISOLatinArabic: CFStringEncoding = 0x0206; ++//static kCFStringEncodingISOLatinGreek: CFStringEncoding = 0x0207; ++//static kCFStringEncodingISOLatinHebrew: CFStringEncoding = 0x0208; ++//static kCFStringEncodingISOLatin5: CFStringEncoding = 0x0209; ++//static kCFStringEncodingISOLatin6: CFStringEncoding = 0x020A; ++//static kCFStringEncodingISOLatinThai: CFStringEncoding = 0x020B; ++//static kCFStringEncodingISOLatin7: CFStringEncoding = 0x020D; ++//static kCFStringEncodingISOLatin8: CFStringEncoding = 0x020E; ++//static kCFStringEncodingISOLatin9: CFStringEncoding = 0x020F; ++//static kCFStringEncodingISOLatin10: CFStringEncoding = 0x0210; ++//static kCFStringEncodingDOSLatinUS: CFStringEncoding = 0x0400; ++//static kCFStringEncodingDOSGreek: CFStringEncoding = 0x0405; ++//static kCFStringEncodingDOSBalticRim: CFStringEncoding = 0x0406; 
++//static kCFStringEncodingDOSLatin1: CFStringEncoding = 0x0410; ++//static kCFStringEncodingDOSGreek1: CFStringEncoding = 0x0411; ++//static kCFStringEncodingDOSLatin2: CFStringEncoding = 0x0412; ++//static kCFStringEncodingDOSCyrillic: CFStringEncoding = 0x0413; ++//static kCFStringEncodingDOSTurkish: CFStringEncoding = 0x0414; ++//static kCFStringEncodingDOSPortuguese: CFStringEncoding = 0x0415; ++//static kCFStringEncodingDOSIcelandic: CFStringEncoding = 0x0416; ++//static kCFStringEncodingDOSHebrew: CFStringEncoding = 0x0417; ++//static kCFStringEncodingDOSCanadianFrench: CFStringEncoding = 0x0418; ++//static kCFStringEncodingDOSArabic: CFStringEncoding = 0x0419; ++//static kCFStringEncodingDOSNordic: CFStringEncoding = 0x041A; ++//static kCFStringEncodingDOSRussian: CFStringEncoding = 0x041B; ++//static kCFStringEncodingDOSGreek2: CFStringEncoding = 0x041C; ++//static kCFStringEncodingDOSThai: CFStringEncoding = 0x041D; ++//static kCFStringEncodingDOSJapanese: CFStringEncoding = 0x0420; ++//static kCFStringEncodingDOSChineseSimplif: CFStringEncoding = 0x0421; ++//static kCFStringEncodingDOSKorean: CFStringEncoding = 0x0422; ++//static kCFStringEncodingDOSChineseTrad: CFStringEncoding = 0x0423; ++// Defined above: kCFStringEncodingWindowsLatin1 = 0x0500 ++//static kCFStringEncodingWindowsLatin2: CFStringEncoding = 0x0501; ++//static kCFStringEncodingWindowsCyrillic: CFStringEncoding = 0x0502; ++//static kCFStringEncodingWindowsGreek: CFStringEncoding = 0x0503; ++//static kCFStringEncodingWindowsLatin5: CFStringEncoding = 0x0504; ++//static kCFStringEncodingWindowsHebrew: CFStringEncoding = 0x0505; ++//static kCFStringEncodingWindowsArabic: CFStringEncoding = 0x0506; ++//static kCFStringEncodingWindowsBalticRim: CFStringEncoding = 0x0507; ++//static kCFStringEncodingWindowsVietnamese: CFStringEncoding = 0x0508; ++//static kCFStringEncodingWindowsKoreanJohab: CFStringEncoding = 0x0510; ++// Defined above: kCFStringEncodingASCII = 0x0600 ++//static 
kCFStringEncodingANSEL: CFStringEncoding = 0x0601; ++//static kCFStringEncodingJIS_X0201_76: CFStringEncoding = 0x0620; ++//static kCFStringEncodingJIS_X0208_83: CFStringEncoding = 0x0621; ++//static kCFStringEncodingJIS_X0208_90: CFStringEncoding = 0x0622; ++//static kCFStringEncodingJIS_X0212_90: CFStringEncoding = 0x0623; ++//static kCFStringEncodingJIS_C6226_78: CFStringEncoding = 0x0624; ++//static kCFStringEncodingShiftJIS_X0213: CFStringEncoding = 0x0628; ++//static kCFStringEncodingShiftJIS_X0213_MenKuTen: CFStringEncoding = 0x0629; ++//static kCFStringEncodingGB_2312_80: CFStringEncoding = 0x0630; ++//static kCFStringEncodingGBK_95: CFStringEncoding = 0x0631; ++//static kCFStringEncodingGB_18030_2000: CFStringEncoding = 0x0632; ++//static kCFStringEncodingKSC_5601_87: CFStringEncoding = 0x0640; ++//static kCFStringEncodingKSC_5601_92_Johab: CFStringEncoding = 0x0641; ++//static kCFStringEncodingCNS_11643_92_P1: CFStringEncoding = 0x0651; ++//static kCFStringEncodingCNS_11643_92_P2: CFStringEncoding = 0x0652; ++//static kCFStringEncodingCNS_11643_92_P3: CFStringEncoding = 0x0653; ++//static kCFStringEncodingISO_2022_JP: CFStringEncoding = 0x0820; ++//static kCFStringEncodingISO_2022_JP_2: CFStringEncoding = 0x0821; ++//static kCFStringEncodingISO_2022_JP_1: CFStringEncoding = 0x0822; ++//static kCFStringEncodingISO_2022_JP_3: CFStringEncoding = 0x0823; ++//static kCFStringEncodingISO_2022_CN: CFStringEncoding = 0x0830; ++//static kCFStringEncodingISO_2022_CN_EXT: CFStringEncoding = 0x0831; ++//static kCFStringEncodingISO_2022_KR: CFStringEncoding = 0x0840; ++//static kCFStringEncodingEUC_JP: CFStringEncoding = 0x0920; ++//static kCFStringEncodingEUC_CN: CFStringEncoding = 0x0930; ++//static kCFStringEncodingEUC_TW: CFStringEncoding = 0x0931; ++//static kCFStringEncodingEUC_KR: CFStringEncoding = 0x0940; ++//static kCFStringEncodingShiftJIS: CFStringEncoding = 0x0A01; ++//static kCFStringEncodingKOI8_R: CFStringEncoding = 0x0A02; ++//static 
kCFStringEncodingBig5: CFStringEncoding = 0x0A03; ++//static kCFStringEncodingMacRomanLatin1: CFStringEncoding = 0x0A04; ++//static kCFStringEncodingHZ_GB_2312: CFStringEncoding = 0x0A05; ++//static kCFStringEncodingBig5_HKSCS_1999: CFStringEncoding = 0x0A06; ++//static kCFStringEncodingVISCII: CFStringEncoding = 0x0A07; ++//static kCFStringEncodingKOI8_U: CFStringEncoding = 0x0A08; ++//static kCFStringEncodingBig5_E: CFStringEncoding = 0x0A09; ++// Defined above: kCFStringEncodingNextStepLatin = 0x0B01 ++//static kCFStringEncodingNextStepJapanese: CFStringEncoding = 0x0B02; ++//static kCFStringEncodingEBCDIC_US: CFStringEncoding = 0x0C01; ++//static kCFStringEncodingEBCDIC_CP037: CFStringEncoding = 0x0C02; ++//static kCFStringEncodingUTF7: CFStringEncoding = 0x04000100; ++//static kCFStringEncodingUTF7_IMAP: CFStringEncoding = 0x0A10; ++//static kCFStringEncodingShiftJIS_X0213_00: CFStringEncoding = 0x0628; /* Deprecated */ ++ ++#[repr(C)] ++pub struct __CFString(c_void); ++ ++pub type CFStringRef = *const __CFString; ++ ++extern { ++ /* ++ * CFString.h ++ */ ++ ++ // N.B. 
organized according to "Functions by task" in docs ++ ++ /* Creating a CFString */ ++ //fn CFSTR ++ //fn CFStringCreateArrayBySeparatingStrings ++ //fn CFStringCreateByCombiningStrings ++ //fn CFStringCreateCopy ++ //fn CFStringCreateFromExternalRepresentation ++ pub fn CFStringCreateWithBytes(alloc: CFAllocatorRef, ++ bytes: *const u8, ++ numBytes: CFIndex, ++ encoding: CFStringEncoding, ++ isExternalRepresentation: Boolean, ++ contentsDeallocator: CFAllocatorRef) ++ -> CFStringRef; ++ pub fn CFStringCreateWithBytesNoCopy(alloc: CFAllocatorRef, ++ bytes: *const u8, ++ numBytes: CFIndex, ++ encoding: CFStringEncoding, ++ isExternalRepresentation: Boolean, ++ contentsDeallocator: CFAllocatorRef) ++ -> CFStringRef; ++ //fn CFStringCreateWithCharacters ++ //fn CFStringCreateWithCharactersNoCopy ++ pub fn CFStringCreateWithCString(alloc: CFAllocatorRef, ++ cStr: *const c_char, ++ encoding: CFStringEncoding) ++ -> CFStringRef; ++ //fn CFStringCreateWithCStringNoCopy ++ //fn CFStringCreateWithFormat ++ //fn CFStringCreateWithFormatAndArguments ++ //fn CFStringCreateWithPascalString ++ //fn CFStringCreateWithPascalStringNoCopy ++ //fn CFStringCreateWithSubstring ++ ++ /* Searching Strings */ ++ //fn CFStringCreateArrayWithFindResults ++ //fn CFStringFind ++ //fn CFStringFindCharacterFromSet ++ //fn CFStringFindWithOptions ++ //fn CFStringFindWithOptionsAndLocale ++ //fn CFStringGetLineBounds ++ ++ /* Comparing Strings */ ++ //fn CFStringCompare ++ //fn CFStringCompareWithOptions ++ //fn CFStringCompareWithOptionsAndLocale ++ //fn CFStringHasPrefix ++ //fn CFStringHasSuffix ++ ++ /* Accessing Characters */ ++ //fn CFStringCreateExternalRepresentation ++ pub fn CFStringGetBytes(theString: CFStringRef, ++ range: CFRange, ++ encoding: CFStringEncoding, ++ lossByte: u8, ++ isExternalRepresentation: Boolean, ++ buffer: *mut u8, ++ maxBufLen: CFIndex, ++ usedBufLen: *mut CFIndex) ++ -> CFIndex; ++ //fn CFStringGetCharacterAtIndex ++ //fn CFStringGetCharacters ++ //fn 
CFStringGetCharactersPtr ++ //fn CFStringGetCharacterFromInlineBuffer ++ pub fn CFStringGetCString(theString: CFStringRef, ++ buffer: *mut c_char, ++ bufferSize: CFIndex, ++ encoding: CFStringEncoding) ++ -> Boolean; ++ pub fn CFStringGetCStringPtr(theString: CFStringRef, ++ encoding: CFStringEncoding) ++ -> *const c_char; ++ pub fn CFStringGetLength(theString: CFStringRef) -> CFIndex; ++ //fn CFStringGetPascalString ++ //fn CFStringGetPascalStringPtr ++ //fn CFStringGetRangeOfComposedCharactersAtIndex ++ //fn CFStringInitInlineBuffer ++ ++ /* Working With Hyphenation */ ++ //fn CFStringGetHyphenationLocationBeforeIndex ++ //fn CFStringIsHyphenationAvailableForLocale ++ ++ /* Working With Encodings */ ++ //fn CFStringConvertEncodingToIANACharSetName ++ //fn CFStringConvertEncodingToNSStringEncoding ++ //fn CFStringConvertEncodingToWindowsCodepage ++ //fn CFStringConvertIANACharSetNameToEncoding ++ //fn CFStringConvertNSStringEncodingToEncoding ++ //fn CFStringConvertWindowsCodepageToEncoding ++ //fn CFStringGetFastestEncoding ++ //fn CFStringGetListOfAvailableEncodings ++ //fn CFStringGetMaximumSizeForEncoding ++ //fn CFStringGetMostCompatibleMacStringEncoding ++ //fn CFStringGetNameOfEncoding ++ //fn CFStringGetSmallestEncoding ++ //fn CFStringGetSystemEncoding ++ //fn CFStringIsEncodingAvailable ++ ++ /* Getting Numeric Values */ ++ //fn CFStringGetDoubleValue ++ //fn CFStringGetIntValue ++ ++ /* Getting String Properties */ ++ //fn CFShowStr ++ pub fn CFStringGetTypeID() -> CFTypeID; ++ ++ /* String File System Representations */ ++ //fn CFStringCreateWithFileSystemRepresentation ++ //fn CFStringGetFileSystemRepresentation ++ //fn CFStringGetMaximumSizeOfFileSystemRepresentation ++ ++ /* Getting Paragraph Bounds */ ++ //fn CFStringGetParagraphBounds ++ ++ /* Managing Surrogates */ ++ //fn CFStringGetLongCharacterForSurrogatePair ++ //fn CFStringGetSurrogatePairForLongCharacter ++ //fn CFStringIsSurrogateHighCharacter ++ //fn CFStringIsSurrogateLowCharacter ++} 
diff --cc vendor/core-foundation-sys-0.4.4/src/url.rs index 000000000,000000000..3081ec2e9 new file mode 100644 --- /dev/null +++ b/vendor/core-foundation-sys-0.4.4/src/url.rs @@@ -1,0 -1,0 +1,162 @@@ ++// Copyright 2013-2015 The Servo Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++use libc::c_void; ++ ++use base::{CFOptionFlags, CFIndex, CFAllocatorRef, Boolean, CFTypeID, CFTypeRef, SInt32}; ++use string::{CFStringRef, CFStringEncoding}; ++use error::CFErrorRef; ++ ++#[repr(C)] ++pub struct __CFURL(c_void); ++ ++pub type CFURLRef = *const __CFURL; ++ ++pub type CFURLBookmarkCreationOptions = CFOptionFlags; ++ ++pub type CFURLPathStyle = CFIndex; ++ ++/* typedef CF_ENUM(CFIndex, CFURLPathStyle) */ ++pub const kCFURLPOSIXPathStyle: CFURLPathStyle = 0; ++pub const kCFURLHFSPathStyle: CFURLPathStyle = 1; ++pub const kCFURLWindowsPathStyle: CFURLPathStyle = 2; ++ ++// static kCFURLBookmarkCreationPreferFileIDResolutionMask: CFURLBookmarkCreationOptions = ++// (1 << 8) as u32; ++// static kCFURLBookmarkCreationMinimalBookmarkMask: CFURLBookmarkCreationOptions = ++// (1 << 9) as u32; ++// static kCFURLBookmarkCreationSuitableForBookmarkFile: CFURLBookmarkCreationOptions = ++// (1 << 10) as u32; ++// static kCFURLBookmarkCreationWithSecurityScope: CFURLBookmarkCreationOptions = ++// (1 << 11) as u32; ++// static kCFURLBookmarkCreationSecurityScopeAllowOnlyReadAccess: CFURLBookmarkCreationOptions = ++// (1 << 12) as u32; ++ ++// TODO: there are a lot of missing keys and constants. Add if you are bored or need them. 
++ ++extern { ++ /* ++ * CFURL.h ++ */ ++ ++ /* Common File System Resource Keys */ ++ pub static kCFURLAttributeModificationDateKey: CFStringRef; ++ pub static kCFURLContentAccessDateKey: CFStringRef; ++ pub static kCFURLContentModificationDateKey: CFStringRef; ++ pub static kCFURLCreationDateKey: CFStringRef; ++ pub static kCFURLFileResourceIdentifierKey: CFStringRef; ++ pub static kCFURLFileSecurityKey: CFStringRef; ++ pub static kCFURLHasHiddenExtensionKey: CFStringRef; ++ pub static kCFURLIsDirectoryKey: CFStringRef; ++ pub static kCFURLIsExecutableKey: CFStringRef; ++ pub static kCFURLIsHiddenKey: CFStringRef; ++ pub static kCFURLIsPackageKey: CFStringRef; ++ pub static kCFURLIsReadableKey: CFStringRef; ++ pub static kCFURLIsRegularFileKey: CFStringRef; ++ pub static kCFURLIsSymbolicLinkKey: CFStringRef; ++ pub static kCFURLIsSystemImmutableKey: CFStringRef; ++ pub static kCFURLIsUserImmutableKey: CFStringRef; ++ pub static kCFURLIsVolumeKey: CFStringRef; ++ pub static kCFURLIsWritableKey: CFStringRef; ++ pub static kCFURLLabelNumberKey: CFStringRef; ++ pub static kCFURLLinkCountKey: CFStringRef; ++ pub static kCFURLLocalizedLabelKey: CFStringRef; ++ pub static kCFURLLocalizedNameKey: CFStringRef; ++ pub static kCFURLLocalizedTypeDescriptionKey: CFStringRef; ++ pub static kCFURLNameKey: CFStringRef; ++ pub static kCFURLParentDirectoryURLKey: CFStringRef; ++ pub static kCFURLPreferredIOBlockSizeKey: CFStringRef; ++ pub static kCFURLTypeIdentifierKey: CFStringRef; ++ pub static kCFURLVolumeIdentifierKey: CFStringRef; ++ pub static kCFURLVolumeURLKey: CFStringRef; ++ ++ #[cfg(feature="mac_os_10_8_features")] ++ #[cfg_attr(feature = "mac_os_10_7_support", linkage = "extern_weak")] ++ pub static kCFURLIsExcludedFromBackupKey: CFStringRef; ++ pub static kCFURLFileResourceTypeKey: CFStringRef; ++ ++ /* Creating a CFURL */ ++ pub fn CFURLCopyAbsoluteURL(anURL: CFURLRef) -> CFURLRef; ++ //fn CFURLCreateAbsoluteURLWithBytes ++ //fn CFURLCreateByResolvingBookmarkData ++ 
//fn CFURLCreateCopyAppendingPathComponent ++ //fn CFURLCreateCopyAppendingPathExtension ++ //fn CFURLCreateCopyDeletingLastPathComponent ++ //fn CFURLCreateCopyDeletingPathExtension ++ pub fn CFURLCreateFilePathURL(allocator: CFAllocatorRef, url: CFURLRef, error: *mut CFErrorRef) -> CFURLRef; ++ //fn CFURLCreateFileReferenceURL ++ pub fn CFURLCreateFromFileSystemRepresentation(allocator: CFAllocatorRef, buffer: *const u8, bufLen: CFIndex, isDirectory: Boolean) -> CFURLRef; ++ //fn CFURLCreateFromFileSystemRepresentationRelativeToBase ++ //fn CFURLCreateFromFSRef ++ pub fn CFURLCreateWithBytes(allocator: CFAllocatorRef, URLBytes: *const u8, length: CFIndex, encoding: CFStringEncoding, baseURL: CFURLRef) -> CFURLRef; ++ pub fn CFURLCreateWithFileSystemPath(allocator: CFAllocatorRef, filePath: CFStringRef, pathStyle: CFURLPathStyle, isDirectory: Boolean) -> CFURLRef; ++ pub fn CFURLCreateWithFileSystemPathRelativeToBase(allocator: CFAllocatorRef, filePath: CFStringRef, pathStyle: CFURLPathStyle, isDirectory: Boolean, baseURL: CFURLRef) -> CFURLRef; ++ //fn CFURLCreateWithString(allocator: CFAllocatorRef, urlString: CFStringRef, ++ // baseURL: CFURLRef) -> CFURLRef; ++ ++ /* Accessing the Parts of a URL */ ++ pub fn CFURLCanBeDecomposed(anURL: CFURLRef) -> Boolean; ++ pub fn CFURLCopyFileSystemPath(anURL: CFURLRef, pathStyle: CFURLPathStyle) -> CFStringRef; ++ pub fn CFURLCopyFragment(anURL: CFURLRef, charactersToLeaveEscaped: CFStringRef) -> CFStringRef; ++ pub fn CFURLCopyHostName(anURL: CFURLRef) -> CFStringRef; ++ pub fn CFURLCopyLastPathComponent(anURL: CFURLRef) -> CFStringRef; ++ pub fn CFURLCopyNetLocation(anURL: CFURLRef) -> CFStringRef; ++ pub fn CFURLCopyParameterString(anURL: CFURLRef, charactersToLeaveEscaped: CFStringRef) -> CFStringRef; ++ pub fn CFURLCopyPassword(anURL: CFURLRef) -> CFStringRef; ++ pub fn CFURLCopyPath(anURL: CFURLRef) -> CFStringRef; ++ pub fn CFURLCopyPathExtension(anURL: CFURLRef) -> CFStringRef; ++ pub fn 
CFURLCopyQueryString(anURL: CFURLRef, charactersToLeaveEscaped: CFStringRef) -> CFStringRef; ++ pub fn CFURLCopyResourceSpecifier(anURL: CFURLRef) -> CFStringRef; ++ pub fn CFURLCopyScheme(anURL: CFURLRef) -> CFStringRef; ++ pub fn CFURLCopyStrictPath(anURL: CFURLRef, isAbsolute: *mut Boolean) -> CFStringRef; ++ pub fn CFURLCopyUserName(anURL: CFURLRef) -> CFStringRef; ++ pub fn CFURLGetPortNumber(anURL: CFURLRef) -> SInt32; ++ pub fn CFURLHasDirectoryPath(anURL: CFURLRef) -> Boolean; ++ ++ /* Converting URLs to Other Representations */ ++ //fn CFURLCreateData(allocator: CFAllocatorRef, url: CFURLRef, ++ // encoding: CFStringEncoding, escapeWhitespace: bool) -> CFDataRef; ++ //fn CFURLCreateStringByAddingPercentEscapes ++ //fn CFURLCreateStringByReplacingPercentEscapes ++ //fn CFURLCreateStringByReplacingPercentEscapesUsingEncoding ++ //fn CFURLGetFileSystemRepresentation ++ //fn CFURLGetFSRef ++ pub fn CFURLGetString(anURL: CFURLRef) -> CFStringRef; ++ ++ /* Getting URL Properties */ ++ //fn CFURLGetBaseURL(anURL: CFURLRef) -> CFURLRef; ++ //fn CFURLGetBytes ++ //fn CFURLGetByteRangeForComponent ++ pub fn CFURLGetTypeID() -> CFTypeID; ++ //fn CFURLResourceIsReachable ++ ++ /* Getting and Setting File System Resource Properties */ ++ pub fn CFURLClearResourcePropertyCache(url: CFURLRef); ++ //fn CFURLClearResourcePropertyCacheForKey ++ //fn CFURLCopyResourcePropertiesForKeys ++ //fn CFURLCopyResourcePropertyForKey ++ //fn CFURLCreateResourcePropertiesForKeysFromBookmarkData ++ //fn CFURLCreateResourcePropertyForKeyFromBookmarkData ++ //fn CFURLSetResourcePropertiesForKeys ++ pub fn CFURLSetResourcePropertyForKey(url: CFURLRef, key: CFStringRef, value: CFTypeRef, error: *mut CFErrorRef) -> Boolean; ++ //fn CFURLSetTemporaryResourcePropertyForKey ++ ++ /* Working with Bookmark Data */ ++ //fn CFURLCreateBookmarkData ++ //fn CFURLCreateBookmarkDataFromAliasRecord ++ //fn CFURLCreateBookmarkDataFromFile ++ //fn CFURLWriteBookmarkDataToFile ++ //fn 
CFURLStartAccessingSecurityScopedResource ++ //fn CFURLStopAccessingSecurityScopedResource ++} ++ ++#[test] ++#[cfg(feature="mac_os_10_8_features")] ++fn can_see_excluded_from_backup_key() { ++ let _ = unsafe { kCFURLIsExcludedFromBackupKey }; ++} diff --cc vendor/curl-0.4.8/.cargo-checksum.json index 000000000,000000000..865cddf7d new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"7034c534a1d7d22f7971d6088aa9d281d219ef724026c3428092500f41ae9c2c"} diff --cc vendor/curl-0.4.8/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/.cargo-ok diff --cc vendor/curl-0.4.8/.gitmodules index 000000000,000000000..7196785dd new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/.gitmodules @@@ -1,0 -1,0 +1,3 @@@ ++[submodule "curl-sys/curl"] ++ path = curl-sys/curl ++ url = https://github.com/alexcrichton/curl diff --cc vendor/curl-0.4.8/.travis.yml index 000000000,000000000..00ff5cc1f new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/.travis.yml @@@ -1,0 -1,0 +1,68 @@@ ++language: rust ++sudo: required ++dist: trusty ++services: ++ - docker ++ ++matrix: ++ include: ++ - os: linux ++ rust: stable ++ env: TARGET=x86_64-unknown-linux-gnu DOCKER=linux64 NO_ADD=1 ++ - os: linux ++ rust: stable ++ env: TARGET=i686-unknown-linux-gnu DOCKER=linux32 ++ - os: linux ++ rust: stable ++ env: TARGET=x86_64-unknown-linux-musl DOCKER=musl ++ - os: linux ++ rust: stable ++ env: TARGET=x86_64-pc-windows-gnu NO_RUN=1 DOCKER=mingw ++ - os: linux ++ rust: stable ++ env: TARGET=x86_64-unknown-linux-gnu DOCKER=linux64-curl NO_ADD=1 ++ - os: osx ++ rust: stable ++ env: TARGET=x86_64-apple-darwin NO_ADD=1 ++ - os: osx ++ rust: stable ++ env: TARGET=i686-apple-darwin ++ - os: linux ++ rust: beta ++ env: TARGET=x86_64-unknown-linux-gnu DOCKER=linux64 NO_ADD=1 ++ - os: linux ++ rust: nightly ++ env: TARGET=x86_64-unknown-linux-gnu DOCKER=linux64 NO_ADD=1 ++sudo: 
false ++before_script: ++ - pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH ++ - if [ -z "$NO_ADD" ]; then rustup target add $TARGET; fi ++script: ++ - curl --version ++ - cargo generate-lockfile ++ - cargo generate-lockfile --manifest-path systest/Cargo.toml ++ - if [ -z "$DOCKER" ]; then ++ sh ci/run.sh; ++ else ++ mkdir .cargo target; ++ docker build -t rust -f ci/Dockerfile-$DOCKER ci; ++ docker run ++ -w /src ++ -v `pwd`:/src:ro ++ -v `pwd`/target:/src/target ++ -v `pwd`/ci/.cargo:/src/.cargo:ro ++ -v `rustc --print sysroot`:/usr/local:ro ++ -e TARGET=$TARGET ++ -e NO_RUN=$NO_RUN ++ -e CARGO_TARGET_DIR=/src/target ++ -it rust ++ sh ci/run.sh; ++ fi ++after_success: ++ - travis-cargo --only nightly doc-upload ++notifications: ++ email: ++ on_success: never ++env: ++ global: ++ secure: "j4son34/PmqogLMUHgcvOk+XtyUtcd0aAA8Sa/h4pyupw8AEM7+5DMMIrcrRh7ieKqmL2RSSGnYtYbd2b5yYroudypsqmQhK0StzrtPaftl/8zxw8liXzA9rat8MP0vuEAe5w9KLRdFKUCU7TzcYXcKttpbavqdNsJae+OFzHJc=" diff --cc vendor/curl-0.4.8/Cargo.toml index 000000000,000000000..10720cb23 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/Cargo.toml @@@ -1,0 -1,0 +1,35 @@@ ++[package] ++ ++name = "curl" ++version = "0.4.8" ++authors = ["Carl Lerche ", ++ "Alex Crichton "] ++license = "MIT" ++repository = "https://github.com/alexcrichton/curl-rust" ++homepage = "https://github.com/alexcrichton/curl-rust" ++documentation = "https://docs.rs/curl" ++description = "Rust bindings to libcurl for making HTTP requests" ++categories = ["api-bindings", "web-programming::http-client"] ++ ++[badges] ++travis-ci = { repository = "alexcrichton/curl-rust" } ++appveyor = { repository = "alexcrichton/curl-rust" } ++ ++[dependencies] ++libc = "0.2" ++curl-sys = { path = "curl-sys", version = "0.3.13" } ++socket2 = "0.2" ++ ++# Unix platforms use OpenSSL for now to provide SSL functionality ++[target."cfg(all(unix, not(target_os = \"macos\")))".dependencies] ++openssl-sys = "0.9.0" ++openssl-probe = 
"0.1" ++ ++[target."cfg(windows)".dependencies] ++winapi = "0.2" ++ ++[dev-dependencies] ++mio = "0.6" ++ ++[workspace] ++members = ["systest"] diff --cc vendor/curl-0.4.8/LICENSE index 000000000,000000000..5f5e4b09d new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/LICENSE @@@ -1,0 -1,0 +1,19 @@@ ++Copyright (c) 2014 Carl Lerche ++ ++Permission is hereby granted, free of charge, to any person obtaining a copy ++of this software and associated documentation files (the "Software"), to deal ++in the Software without restriction, including without limitation the rights ++to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ++copies of the Software, and to permit persons to whom the Software is ++furnished to do so, subject to the following conditions: ++ ++The above copyright notice and this permission notice shall be included in ++all copies or substantial portions of the Software. ++ ++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ++IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ++FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ++AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ++LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ++OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN ++THE SOFTWARE. 
diff --cc vendor/curl-0.4.8/README.md index 000000000,000000000..7e5a91b2a new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/README.md @@@ -1,0 -1,0 +1,137 @@@ ++# curl-rust ++ ++libcurl bindings for Rust ++ ++[![Build Status](https://travis-ci.org/alexcrichton/curl-rust.svg?branch=master)](https://travis-ci.org/alexcrichton/curl-rust) ++[![Build status](https://ci.appveyor.com/api/projects/status/lx98wtbxhhhajpr9?svg=true)](https://ci.appveyor.com/project/alexcrichton/curl-rust) ++ ++[Documentation](https://docs.rs/curl) ++ ++## Quick Start ++ ++```rust ++extern crate curl; ++ ++use std::io::{stdout, Write}; ++ ++use curl::easy::Easy; ++ ++// Print a web page onto stdout ++fn main() { ++ let mut easy = Easy::new(); ++ easy.url("https://www.rust-lang.org/").unwrap(); ++ easy.write_function(|data| { ++ Ok(stdout().write(data).unwrap()) ++ }).unwrap(); ++ easy.perform().unwrap(); ++ ++ println!("{}", easy.response_code().unwrap()); ++} ++``` ++ ++```rust ++extern crate curl; ++ ++use curl::easy::Easy; ++ ++// Capture output into a local `Vec`. ++fn main() { ++ let mut dst = Vec::new(); ++ let mut easy = Easy::new(); ++ easy.url("https://www.rust-lang.org/").unwrap(); ++ ++ let mut transfer = easy.transfer(); ++ transfer.write_function(|data| { ++ dst.extend_from_slice(data); ++ Ok(data.len()) ++ }).unwrap(); ++ transfer.perform().unwrap(); ++} ++``` ++ ++## Post / Put requests ++ ++The `put` and `post` methods on `Easy` can configure the method of the HTTP ++request, and then `read_function` can be used to specify how data is filled in. ++This interface works particularly well with types that implement `Read`. 
++ ++```rust,no_run ++extern crate curl; ++ ++use std::io::Read; ++use curl::easy::Easy; ++ ++fn main() { ++ let mut data = "this is the body".as_bytes(); ++ ++ let mut easy = Easy::new(); ++ easy.url("http://www.example.com/upload").unwrap(); ++ easy.post(true).unwrap(); ++ easy.post_field_size(data.len() as u64).unwrap(); ++ ++ let mut transfer = easy.transfer(); ++ transfer.read_function(|buf| { ++ Ok(data.read(buf).unwrap_or(0)) ++ }).unwrap(); ++ transfer.perform().unwrap(); ++} ++``` ++ ++## Custom headers ++ ++Custom headers can be specified as part of the request: ++ ++```rust,no_run ++extern crate curl; ++ ++use curl::easy::{Easy, List}; ++ ++fn main() { ++ let mut easy = Easy::new(); ++ easy.url("http://www.example.com").unwrap(); ++ ++ let mut list = List::new(); ++ list.append("Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==").unwrap(); ++ easy.http_headers(list).unwrap(); ++ easy.perform().unwrap(); ++} ++``` ++ ++## Keep alive ++ ++The handle can be re-used across multiple requests. Curl will attempt to ++keep the connections alive. ++ ++```rust,no_run ++extern crate curl; ++ ++use curl::easy::Easy; ++ ++fn main() { ++ let mut handle = Easy::new(); ++ ++ handle.url("http://www.example.com/foo").unwrap(); ++ handle.perform().unwrap(); ++ ++ handle.url("http://www.example.com/bar").unwrap(); ++ handle.perform().unwrap(); ++} ++``` ++ ++## Multiple requests ++ ++The libcurl library provides support for sending multiple requests ++simultaneously through the "multi" interface. This is currently bound in the ++`multi` module of this crate and provides the ability to execute multiple ++transfers simultaneously. For more information, see that module. ++ ++## Version Support ++ ++The bindings have been developed using curl version 7.24.0. They should ++work with any newer version of curl and possibly with older versions, ++but this has not been tested. 
++ ++## License ++ ++The `curl-rust` crate is licensed under the MIT license, see `LICENSE` for more ++details. diff --cc vendor/curl-0.4.8/appveyor.yml index 000000000,000000000..f13520ad2 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/appveyor.yml @@@ -1,0 -1,0 +1,68 @@@ ++environment: ++ matrix: ++ ++ # Ensure MinGW works, but we need to download the 32-bit MinGW compiler from a ++ # custom location. ++ - TARGET: i686-pc-windows-gnu ++ MINGW_URL: https://s3.amazonaws.com/rust-lang-ci ++ MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z ++ MINGW_DIR: mingw32 ++ - TARGET: x86_64-pc-windows-gnu ++ MSYS_BITS: 64 ++ ++ # Ensure vanilla builds work ++ - TARGET: i686-pc-windows-msvc ++ - TARGET: x86_64-pc-windows-msvc ++ ++ # Pin to specific VS versions to ensure the build works ++ - TARGET: x86_64-pc-windows-msvc ++ ARCH: amd64 ++ VS: C:\Program Files (x86)\Microsoft Visual Studio 12.0\VC\vcvarsall.bat ++ - TARGET: x86_64-pc-windows-msvc ++ ARCH: amd64 ++ VS: C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat ++ ++ # Ensure getting libcurl from vcpkg works ++ - TARGET: x86_64-pc-windows-msvc ++ RUSTFLAGS: -Ctarget-feature=+crt-static ++ VCPKG_DEFAULT_TRIPLET: x64-windows-static ++ ++install: ++ # Install rust, x86_64-pc-windows-msvc host ++ - appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe ++ # use nightly if required until -Ctarget-feature=+crt-static is stable (expected in rust 1.19) ++ - if not defined RUSTFLAGS rustup-init.exe -y --default-host x86_64-pc-windows-msvc ++ - if defined RUSTFLAGS rustup-init.exe -y --default-host x86_64-pc-windows-msvc --default-toolchain nightly ++ - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin ++ ++ # Install the target we're compiling for ++ - if NOT "%TARGET%" == "x86_64-pc-windows-msvc" rustup target add %TARGET% ++ ++ # Use the system msys if we can ++ - if defined MSYS_BITS set PATH=C:\msys64\mingw%MSYS_BITS%\bin;C:\msys64\usr\bin;%PATH% ++ ++ # 
download a custom compiler otherwise ++ - if defined MINGW_URL appveyor DownloadFile %MINGW_URL%/%MINGW_ARCHIVE% ++ - if defined MINGW_URL 7z x -y %MINGW_ARCHIVE% > nul ++ - if defined MINGW_URL set PATH=C:\Python27;%CD%\%MINGW_DIR%\bin;C:\msys64\usr\bin;%PATH% ++ ++ # If we're pinning to a specific visual studio, do so now ++ - if defined VS call "%VS%" %ARCH% ++ ++ # let's see what we got ++ - where gcc rustc cargo ++ - rustc -vV ++ - cargo -vV ++ - set CARGO_TARGET_DIR=%CD%\target ++ ++ # install vcpkg if required ++ - if defined VCPKG_DEFAULT_TRIPLET git clone https://github.com/Microsoft/vcpkg c:\projects\vcpkg ++ - if defined VCPKG_DEFAULT_TRIPLET c:\projects\vcpkg\bootstrap-vcpkg.bat ++ - if defined VCPKG_DEFAULT_TRIPLET set VCPKG_ROOT=c:\projects\vcpkg ++ - if defined VCPKG_DEFAULT_TRIPLET %VCPKG_ROOT%\vcpkg.exe install curl ++ ++build: false ++ ++test_script: ++ - cargo test --target %TARGET% ++ - cargo run --manifest-path systest/Cargo.toml --target %TARGET% diff --cc vendor/curl-0.4.8/ci/.cargo/config index 000000000,000000000..5ed633890 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/ci/.cargo/config @@@ -1,0 -1,0 +1,2 @@@ ++[target.x86_64-pc-windows-gnu] ++linker = "x86_64-w64-mingw32-gcc" diff --cc vendor/curl-0.4.8/ci/Dockerfile-linux32 index 000000000,000000000..4d55dcf04 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/ci/Dockerfile-linux32 @@@ -1,0 -1,0 +1,14 @@@ ++FROM ubuntu:16.04 ++ ++RUN dpkg --add-architecture i386 && \ ++ apt-get update && \ ++ apt-get install -y --no-install-recommends \ ++ gcc-multilib \ ++ ca-certificates \ ++ make \ ++ libc6-dev \ ++ libssl-dev:i386 \ ++ pkg-config ++ ++ENV PKG_CONFIG=i686-linux-gnu-pkg-config \ ++ PKG_CONFIG_ALLOW_CROSS=1 diff --cc vendor/curl-0.4.8/ci/Dockerfile-linux64 index 000000000,000000000..a5c1fe37f new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/ci/Dockerfile-linux64 @@@ -1,0 -1,0 +1,9 @@@ ++FROM ubuntu:16.04 ++ ++RUN apt-get update ++RUN apt-get install -y 
--no-install-recommends \ ++ gcc ca-certificates make libc6-dev \ ++ libssl-dev \ ++ pkg-config ++ ++ENV FEATURES="http2" diff --cc vendor/curl-0.4.8/ci/Dockerfile-linux64-curl index 000000000,000000000..be03c24da new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/ci/Dockerfile-linux64-curl @@@ -1,0 -1,0 +1,6 @@@ ++FROM ubuntu:14.04 ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc ca-certificates make libc6-dev \ ++ libssl-dev libcurl4-openssl-dev pkg-config diff --cc vendor/curl-0.4.8/ci/Dockerfile-mingw index 000000000,000000000..ee5926c8d new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/ci/Dockerfile-mingw @@@ -1,0 -1,0 +1,6 @@@ ++FROM ubuntu:16.04 ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc ca-certificates make libc6-dev \ ++ gcc-mingw-w64-x86-64 libz-mingw-w64-dev diff --cc vendor/curl-0.4.8/ci/Dockerfile-musl index 000000000,000000000..47d211fdf new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/ci/Dockerfile-musl @@@ -1,0 -1,0 +1,18 @@@ ++FROM ubuntu:16.04 ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc ca-certificates make libc6-dev curl \ ++ musl-tools ++ ++RUN \ ++ curl https://www.openssl.org/source/old/1.0.2/openssl-1.0.2g.tar.gz | tar xzf - && \ ++ cd openssl-1.0.2g && \ ++ CC=musl-gcc ./Configure --prefix=/openssl no-dso linux-x86_64 -fPIC && \ ++ make -j10 && \ ++ make install && \ ++ cd .. 
&& \ ++ rm -rf openssl-1.0.2g ++ ++ENV OPENSSL_STATIC=1 \ ++ OPENSSL_DIR=/openssl diff --cc vendor/curl-0.4.8/ci/run.sh index 000000000,000000000..239d0b8c1 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/ci/run.sh @@@ -1,0 -1,0 +1,16 @@@ ++#!/bin/sh ++ ++set -ex ++ ++cargo test --target $TARGET --no-run ++if [ -z "$NO_RUN" ]; then ++ cargo test --target $TARGET ++ cargo run --manifest-path systest/Cargo.toml --target $TARGET ++ cargo doc --no-deps --target $TARGET ++ cargo doc --no-deps -p curl-sys --target $TARGET ++fi ++ ++if [ -n "$FEATURES" ] ++then ++ cargo run --manifest-path systest/Cargo.toml --target $TARGET --features "$FEATURES" ++fi diff --cc vendor/curl-0.4.8/src/easy/form.rs index 000000000,000000000..be98d4e8e new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/src/easy/form.rs @@@ -1,0 -1,0 +1,333 @@@ ++use std::ffi::CString; ++use std::fmt; ++use std::path::Path; ++ ++use FormError; ++use curl_sys; ++use easy::{list, List}; ++ ++/// Multipart/formdata for an HTTP POST request. ++/// ++/// This structure is built up and then passed to the `Easy::httppost` method to ++/// be sent off with a request. ++pub struct Form { ++ head: *mut curl_sys::curl_httppost, ++ tail: *mut curl_sys::curl_httppost, ++ headers: Vec, ++ buffers: Vec>, ++ strings: Vec, ++} ++ ++/// One part in a multipart upload, added to a `Form`. ++pub struct Part<'form, 'data> { ++ form: &'form mut Form, ++ name: &'data str, ++ array: Vec, ++ error: Option, ++} ++ ++pub fn raw(form: &Form) -> *mut curl_sys::curl_httppost { ++ form.head ++} ++ ++impl Form { ++ /// Creates a new blank form ready for the addition of new data. 
++ pub fn new() -> Form { ++ Form { ++ head: 0 as *mut _, ++ tail: 0 as *mut _, ++ headers: Vec::new(), ++ buffers: Vec::new(), ++ strings: Vec::new(), ++ } ++ } ++ ++ /// Prepares adding a new part to this `Form` ++ /// ++ /// Note that the part is not actually added to the form until the `add` ++ /// method is called on `Part`, which may or may not fail. ++ pub fn part<'a, 'data>(&'a mut self, name: &'data str) -> Part<'a, 'data> { ++ Part { ++ error: None, ++ form: self, ++ name: name, ++ array: vec![curl_sys::curl_forms { ++ option: curl_sys::CURLFORM_END, ++ value: 0 as *mut _, ++ }], ++ } ++ } ++} ++ ++impl fmt::Debug for Form { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ // TODO: fill this out more ++ f.debug_struct("Form") ++ .field("fields", &"...") ++ .finish() ++ } ++} ++ ++impl Drop for Form { ++ fn drop(&mut self) { ++ unsafe { ++ curl_sys::curl_formfree(self.head); ++ } ++ } ++} ++ ++impl<'form, 'data> Part<'form, 'data> { ++ /// A pointer to the contents of this part, the actual data to send away. ++ pub fn contents(&mut self, contents: &'data [u8]) -> &mut Self { ++ let pos = self.array.len() - 1; ++ ++ // curl has an oddity where if the length if 0 it will call strlen ++ // on the value. This means that if someone wants to add empty form ++ // contents we need to make sure the buffer contains a null byte. ++ let ptr = if contents.is_empty() { ++ b"\x00" ++ } else { ++ contents ++ }.as_ptr(); ++ ++ self.array.insert(pos, curl_sys::curl_forms { ++ option: curl_sys::CURLFORM_COPYCONTENTS, ++ value: ptr as *mut _, ++ }); ++ self.array.insert(pos + 1, curl_sys::curl_forms { ++ option: curl_sys::CURLFORM_CONTENTSLENGTH, ++ value: contents.len() as *mut _, ++ }); ++ self ++ } ++ ++ /// Causes this file to be read and its contents used as data in this part ++ /// ++ /// This part does not automatically become a file upload part simply ++ /// because its data was read from a file. 
++ /// ++ /// # Errors ++ /// ++ /// If the filename has any internal nul bytes or if on Windows it does not ++ /// contain a unicode filename then the `add` function will eventually ++ /// return an error. ++ pub fn file_content

(&mut self, file: P) -> &mut Self ++ where P: AsRef ++ { ++ self._file_content(file.as_ref()) ++ } ++ ++ fn _file_content(&mut self, file: &Path) -> &mut Self { ++ if let Some(bytes) = self.path2cstr(file) { ++ let pos = self.array.len() - 1; ++ self.array.insert(pos, curl_sys::curl_forms { ++ option: curl_sys::CURLFORM_FILECONTENT, ++ value: bytes.as_ptr() as *mut _, ++ }); ++ self.form.strings.push(bytes); ++ } ++ self ++ } ++ ++ /// Makes this part a file upload part of the given file. ++ /// ++ /// Sets the filename field to the basename of the provided file name, and ++ /// it reads the contents of the file and passes them as data and sets the ++ /// content type if the given file matches one of the internally known file ++ /// extensions. ++ /// ++ /// The given upload file must exist entirely on the filesystem before the ++ /// upload is started because libcurl needs to read the size of it ++ /// beforehand. ++ /// ++ /// Multiple files can be uploaded by calling this method multiple times and ++ /// content types can also be configured for each file (by calling that ++ /// next). ++ /// ++ /// # Errors ++ /// ++ /// If the filename has any internal nul bytes or if on Windows it does not ++ /// contain a unicode filename then this function will cause `add` to return ++ /// an error when called. ++ pub fn file(&mut self, file: &'data P) -> &mut Self ++ where P: AsRef ++ { ++ self._file(file.as_ref()) ++ } ++ ++ fn _file(&mut self, file: &'data Path) -> &mut Self { ++ if let Some(bytes) = self.path2cstr(file) { ++ let pos = self.array.len() - 1; ++ self.array.insert(pos, curl_sys::curl_forms { ++ option: curl_sys::CURLFORM_FILE, ++ value: bytes.as_ptr() as *mut _, ++ }); ++ self.form.strings.push(bytes); ++ } ++ self ++ } ++ ++ /// Used in combination with `Part::file`, provides the content-type for ++ /// this part, possibly instead of choosing an internal one. 
++ /// ++ /// # Panics ++ /// ++ /// This function will panic if `content_type` contains an internal nul ++ /// byte. ++ pub fn content_type(&mut self, content_type: &'data str) -> &mut Self { ++ if let Some(bytes) = self.bytes2cstr(content_type.as_bytes()) { ++ let pos = self.array.len() - 1; ++ self.array.insert(pos, curl_sys::curl_forms { ++ option: curl_sys::CURLFORM_CONTENTTYPE, ++ value: bytes.as_ptr() as *mut _, ++ }); ++ self.form.strings.push(bytes); ++ } ++ self ++ } ++ ++ /// Used in combination with `Part::file`, provides the filename for ++ /// this part instead of the actual one. ++ /// ++ /// # Errors ++ /// ++ /// If `name` contains an internal nul byte, or if on Windows the path is ++ /// not valid unicode then this function will return an error when `add` is ++ /// called. ++ pub fn filename(&mut self, name: &'data P) -> &mut Self ++ where P: AsRef ++ { ++ self._filename(name.as_ref()) ++ } ++ ++ fn _filename(&mut self, name: &'data Path) -> &mut Self { ++ if let Some(bytes) = self.path2cstr(name) { ++ let pos = self.array.len() - 1; ++ self.array.insert(pos, curl_sys::curl_forms { ++ option: curl_sys::CURLFORM_FILENAME, ++ value: bytes.as_ptr() as *mut _, ++ }); ++ self.form.strings.push(bytes); ++ } ++ self ++ } ++ ++ /// This is used to provide a custom file upload part without using the ++ /// `file` method above. ++ /// ++ /// The first parameter is for the filename field and the second is the ++ /// in-memory contents. ++ /// ++ /// # Errors ++ /// ++ /// If `name` contains an internal nul byte, or if on Windows the path is ++ /// not valid unicode then this function will return an error when `add` is ++ /// called. 
++ pub fn buffer(&mut self, name: &'data P, data: Vec) ++ -> &mut Self ++ where P: AsRef ++ { ++ self._buffer(name.as_ref(), data) ++ } ++ ++ fn _buffer(&mut self, name: &'data Path, data: Vec) -> &mut Self { ++ if let Some(bytes) = self.path2cstr(name) { ++ let pos = self.array.len() - 1; ++ self.array.insert(pos, curl_sys::curl_forms { ++ option: curl_sys::CURLFORM_BUFFER, ++ value: bytes.as_ptr() as *mut _, ++ }); ++ self.form.strings.push(bytes); ++ self.array.insert(pos + 1, curl_sys::curl_forms { ++ option: curl_sys::CURLFORM_BUFFERPTR, ++ value: data.as_ptr() as *mut _, ++ }); ++ self.array.insert(pos + 2, curl_sys::curl_forms { ++ option: curl_sys::CURLFORM_BUFFERLENGTH, ++ value: data.len() as *mut _, ++ }); ++ self.form.buffers.push(data); ++ } ++ self ++ } ++ ++ /// Specifies extra headers for the form POST section. ++ /// ++ /// Appends the list of headers to those libcurl automatically generates. ++ pub fn content_header(&mut self, headers: List) -> &mut Self { ++ let pos = self.array.len() - 1; ++ self.array.insert(pos, curl_sys::curl_forms { ++ option: curl_sys::CURLFORM_CONTENTHEADER, ++ value: list::raw(&headers) as *mut _, ++ }); ++ self.form.headers.push(headers); ++ self ++ } ++ ++ /// Attempts to add this part to the `Form` that it was created from. ++ /// ++ /// If any error happens while adding, that error is returned, otherwise ++ /// `Ok(())` is returned. 
++ pub fn add(&mut self) -> Result<(), FormError> { ++ if let Some(err) = self.error.clone() { ++ return Err(err) ++ } ++ let rc = unsafe { ++ curl_sys::curl_formadd(&mut self.form.head, ++ &mut self.form.tail, ++ curl_sys::CURLFORM_COPYNAME, ++ self.name.as_ptr(), ++ curl_sys::CURLFORM_NAMELENGTH, ++ self.name.len(), ++ curl_sys::CURLFORM_ARRAY, ++ self.array.as_ptr(), ++ curl_sys::CURLFORM_END) ++ }; ++ if rc == curl_sys::CURL_FORMADD_OK { ++ Ok(()) ++ } else { ++ Err(FormError::new(rc)) ++ } ++ } ++ ++ #[cfg(unix)] ++ fn path2cstr(&mut self, p: &Path) -> Option { ++ use std::os::unix::prelude::*; ++ self.bytes2cstr(p.as_os_str().as_bytes()) ++ } ++ ++ #[cfg(windows)] ++ fn path2cstr(&mut self, p: &Path) -> Option { ++ match p.to_str() { ++ Some(bytes) => self.bytes2cstr(bytes.as_bytes()), ++ None if self.error.is_none() => { ++ // TODO: better error code ++ self.error = Some(FormError::new(curl_sys::CURL_FORMADD_INCOMPLETE)); ++ None ++ } ++ None => None, ++ } ++ } ++ ++ fn bytes2cstr(&mut self, bytes: &[u8]) -> Option { ++ match CString::new(bytes) { ++ Ok(c) => Some(c), ++ Err(..) if self.error.is_none() => { ++ // TODO: better error code ++ self.error = Some(FormError::new(curl_sys::CURL_FORMADD_INCOMPLETE)); ++ None ++ } ++ Err(..) 
=> None, ++ } ++ } ++} ++ ++impl<'form, 'data> fmt::Debug for Part<'form, 'data> { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ // TODO: fill this out more ++ f.debug_struct("Part") ++ .field("name", &self.name) ++ .field("form", &self.form) ++ .finish() ++ } ++} diff --cc vendor/curl-0.4.8/src/easy/handle.rs index 000000000,000000000..170f801de new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/src/easy/handle.rs @@@ -1,0 -1,0 +1,1441 @@@ ++use std::cell::Cell; ++use std::fmt; ++use std::io::SeekFrom; ++use std::path::Path; ++use std::ptr; ++use std::str; ++use std::time::Duration; ++ ++use curl_sys; ++use libc::c_void; ++ ++use Error; ++use easy::{Form, List}; ++use easy::handler::{self, InfoType, SeekResult, ReadError, WriteError}; ++use easy::handler::{TimeCondition, IpResolve, HttpVersion, SslVersion}; ++use easy::handler::{SslOpt, NetRc, Auth, ProxyType}; ++use easy::{Easy2, Handler}; ++ ++/// Raw bindings to a libcurl "easy session". ++/// ++/// This type is the same as the `Easy2` type in this library except that it ++/// does not contain a type parameter. Callbacks from curl are all controlled ++/// via closures on this `Easy` type, and this type namely has a `transfer` ++/// method as well for ergonomic management of these callbacks. ++/// ++/// There's not necessarily a right answer for which type is correct to use, but ++/// as a general rule of thumb `Easy` is typically a reasonable choice for ++/// synchronous I/O and `Easy2` is a good choice for asynchronous I/O. ++/// ++/// ## Examples ++/// ++/// Creating a handle which can be used later ++/// ++/// ``` ++/// use curl::easy::Easy; ++/// ++/// let handle = Easy::new(); ++/// ``` ++/// ++/// Send an HTTP request, writing the response to stdout. 
++/// ++/// ``` ++/// use std::io::{stdout, Write}; ++/// ++/// use curl::easy::Easy; ++/// ++/// let mut handle = Easy::new(); ++/// handle.url("https://www.rust-lang.org/").unwrap(); ++/// handle.write_function(|data| { ++/// Ok(stdout().write(data).unwrap()) ++/// }).unwrap(); ++/// handle.perform().unwrap(); ++/// ``` ++/// ++/// Collect all output of an HTTP request to a vector. ++/// ++/// ``` ++/// use curl::easy::Easy; ++/// ++/// let mut data = Vec::new(); ++/// let mut handle = Easy::new(); ++/// handle.url("https://www.rust-lang.org/").unwrap(); ++/// { ++/// let mut transfer = handle.transfer(); ++/// transfer.write_function(|new_data| { ++/// data.extend_from_slice(new_data); ++/// Ok(new_data.len()) ++/// }).unwrap(); ++/// transfer.perform().unwrap(); ++/// } ++/// println!("{:?}", data); ++/// ``` ++/// ++/// More examples of various properties of an HTTP request can be found on the ++/// specific methods as well. ++#[derive(Debug)] ++pub struct Easy { ++ inner: Easy2, ++} ++ ++/// A scoped transfer of information which borrows an `Easy` and allows ++/// referencing stack-local data of the lifetime `'data`. ++/// ++/// Usage of `Easy` requires the `'static` and `Send` bounds on all callbacks ++/// registered, but that's not often wanted if all you need is to collect a ++/// bunch of data in memory to a vector, for example. The `Transfer` structure, ++/// created by the `Easy::transfer` method, is used for this sort of request. ++/// ++/// The callbacks attached to a `Transfer` are only active for that one transfer ++/// object, and they allow to elide both the `Send` and `'static` bounds to ++/// close over stack-local information. 
++pub struct Transfer<'easy, 'data> { ++ easy: &'easy mut Easy, ++ data: Box>, ++} ++ ++pub struct EasyData { ++ running: Cell, ++ owned: Callbacks<'static>, ++ borrowed: Cell<*mut Callbacks<'static>>, ++} ++ ++unsafe impl Send for EasyData {} ++ ++#[derive(Default)] ++struct Callbacks<'a> { ++ write: Option Result + 'a>>, ++ read: Option Result + 'a>>, ++ seek: Option SeekResult + 'a>>, ++ debug: Option>, ++ header: Option bool + 'a>>, ++ progress: Option bool + 'a>>, ++ ssl_ctx: Option Result<(), Error> + 'a>>, ++} ++ ++impl Easy { ++ /// Creates a new "easy" handle which is the core of almost all operations ++ /// in libcurl. ++ /// ++ /// To use a handle, applications typically configure a number of options ++ /// followed by a call to `perform`. Options are preserved across calls to ++ /// `perform` and need to be reset manually (or via the `reset` method) if ++ /// this is not desired. ++ pub fn new() -> Easy { ++ Easy { ++ inner: Easy2::new(EasyData { ++ running: Cell::new(false), ++ owned: Callbacks::default(), ++ borrowed: Cell::new(ptr::null_mut()), ++ }), ++ } ++ } ++ ++ // ========================================================================= ++ // Behavior options ++ ++ /// Same as [`Easy2::verbose`](struct.Easy2.html#method.verbose) ++ pub fn verbose(&mut self, verbose: bool) -> Result<(), Error> { ++ self.inner.verbose(verbose) ++ } ++ ++ /// Same as [`Easy2::show_header`](struct.Easy2.html#method.show_header) ++ pub fn show_header(&mut self, show: bool) -> Result<(), Error> { ++ self.inner.show_header(show) ++ } ++ ++ /// Same as [`Easy2::progress`](struct.Easy2.html#method.progress) ++ pub fn progress(&mut self, progress: bool) -> Result<(), Error> { ++ self.inner.progress(progress) ++ } ++ ++ /// Same as [`Easy2::signal`](struct.Easy2.html#method.signal) ++ pub fn signal(&mut self, signal: bool) -> Result<(), Error> { ++ self.inner.signal(signal) ++ } ++ ++ /// Same as [`Easy2::wildcard_match`](struct.Easy2.html#method.wildcard_match) ++ pub fn 
wildcard_match(&mut self, m: bool) -> Result<(), Error> { ++ self.inner.wildcard_match(m) ++ } ++ ++ /// Same as [`Easy2::unix_socket`](struct.Easy2.html#method.unix_socket) ++ pub fn unix_socket(&mut self, unix_domain_socket: &str) -> Result<(), Error> { ++ self.inner.unix_socket(unix_domain_socket) ++ } ++ ++ // ========================================================================= ++ // Callback options ++ ++ /// Set callback for writing received data. ++ /// ++ /// This callback function gets called by libcurl as soon as there is data ++ /// received that needs to be saved. ++ /// ++ /// The callback function will be passed as much data as possible in all ++ /// invokes, but you must not make any assumptions. It may be one byte, it ++ /// may be thousands. If `show_header` is enabled, which makes header data ++ /// get passed to the write callback, you can get up to ++ /// `CURL_MAX_HTTP_HEADER` bytes of header data passed into it. This ++ /// usually means 100K. ++ /// ++ /// This function may be called with zero bytes data if the transferred file ++ /// is empty. ++ /// ++ /// The callback should return the number of bytes actually taken care of. ++ /// If that amount differs from the amount passed to your callback function, ++ /// it'll signal an error condition to the library. This will cause the ++ /// transfer to get aborted and the libcurl function used will return ++ /// an error with `is_write_error`. ++ /// ++ /// If your callback function returns `Err(WriteError::Pause)` it will cause ++ /// this transfer to become paused. See `unpause_write` for further details. ++ /// ++ /// By default data is sent into the void, and this corresponds to the ++ /// `CURLOPT_WRITEFUNCTION` and `CURLOPT_WRITEDATA` options. ++ /// ++ /// Note that the lifetime bound on this function is `'static`, but that ++ /// is often too restrictive. 
To use stack data consider calling the ++ /// `transfer` method and then using `write_function` to configure a ++ /// callback that can reference stack-local data. ++ /// ++ /// # Examples ++ /// ++ /// ``` ++ /// use std::io::{stdout, Write}; ++ /// use curl::easy::Easy; ++ /// ++ /// let mut handle = Easy::new(); ++ /// handle.url("https://www.rust-lang.org/").unwrap(); ++ /// handle.write_function(|data| { ++ /// Ok(stdout().write(data).unwrap()) ++ /// }).unwrap(); ++ /// handle.perform().unwrap(); ++ /// ``` ++ /// ++ /// Writing to a stack-local buffer ++ /// ++ /// ``` ++ /// use std::io::{stdout, Write}; ++ /// use curl::easy::Easy; ++ /// ++ /// let mut buf = Vec::new(); ++ /// let mut handle = Easy::new(); ++ /// handle.url("https://www.rust-lang.org/").unwrap(); ++ /// ++ /// let mut transfer = handle.transfer(); ++ /// transfer.write_function(|data| { ++ /// buf.extend_from_slice(data); ++ /// Ok(data.len()) ++ /// }).unwrap(); ++ /// transfer.perform().unwrap(); ++ /// ``` ++ pub fn write_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(&[u8]) -> Result + Send + 'static ++ { ++ self.inner.get_mut().owned.write = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Read callback for data uploads. ++ /// ++ /// This callback function gets called by libcurl as soon as it needs to ++ /// read data in order to send it to the peer - like if you ask it to upload ++ /// or post data to the server. ++ /// ++ /// Your function must then return the actual number of bytes that it stored ++ /// in that memory area. Returning 0 will signal end-of-file to the library ++ /// and cause it to stop the current transfer. ++ /// ++ /// If you stop the current transfer by returning 0 "pre-maturely" (i.e ++ /// before the server expected it, like when you've said you will upload N ++ /// bytes and you upload less than N bytes), you may experience that the ++ /// server "hangs" waiting for the rest of the data that won't come. 
++ /// ++ /// The read callback may return `Err(ReadError::Abort)` to stop the ++ /// current operation immediately, resulting in a `is_aborted_by_callback` ++ /// error code from the transfer. ++ /// ++ /// The callback can return `Err(ReadError::Pause)` to cause reading from ++ /// this connection to pause. See `unpause_read` for further details. ++ /// ++ /// By default data not input, and this corresponds to the ++ /// `CURLOPT_READFUNCTION` and `CURLOPT_READDATA` options. ++ /// ++ /// Note that the lifetime bound on this function is `'static`, but that ++ /// is often too restrictive. To use stack data consider calling the ++ /// `transfer` method and then using `read_function` to configure a ++ /// callback that can reference stack-local data. ++ /// ++ /// # Examples ++ /// ++ /// Read input from stdin ++ /// ++ /// ```no_run ++ /// use std::io::{stdin, Read}; ++ /// use curl::easy::Easy; ++ /// ++ /// let mut handle = Easy::new(); ++ /// handle.url("https://example.com/login").unwrap(); ++ /// handle.read_function(|into| { ++ /// Ok(stdin().read(into).unwrap()) ++ /// }).unwrap(); ++ /// handle.post(true).unwrap(); ++ /// handle.perform().unwrap(); ++ /// ``` ++ /// ++ /// Reading from stack-local data: ++ /// ++ /// ```no_run ++ /// use std::io::{stdin, Read}; ++ /// use curl::easy::Easy; ++ /// ++ /// let mut data_to_upload = &b"foobar"[..]; ++ /// let mut handle = Easy::new(); ++ /// handle.url("https://example.com/login").unwrap(); ++ /// handle.post(true).unwrap(); ++ /// ++ /// let mut transfer = handle.transfer(); ++ /// transfer.read_function(|into| { ++ /// Ok(data_to_upload.read(into).unwrap()) ++ /// }).unwrap(); ++ /// transfer.perform().unwrap(); ++ /// ``` ++ pub fn read_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(&mut [u8]) -> Result + Send + 'static ++ { ++ self.inner.get_mut().owned.read = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// User callback for seeking in input stream. 
++ /// ++ /// This function gets called by libcurl to seek to a certain position in ++ /// the input stream and can be used to fast forward a file in a resumed ++ /// upload (instead of reading all uploaded bytes with the normal read ++ /// function/callback). It is also called to rewind a stream when data has ++ /// already been sent to the server and needs to be sent again. This may ++ /// happen when doing a HTTP PUT or POST with a multi-pass authentication ++ /// method, or when an existing HTTP connection is reused too late and the ++ /// server closes the connection. ++ /// ++ /// The callback function must return `SeekResult::Ok` on success, ++ /// `SeekResult::Fail` to cause the upload operation to fail or ++ /// `SeekResult::CantSeek` to indicate that while the seek failed, libcurl ++ /// is free to work around the problem if possible. The latter can sometimes ++ /// be done by instead reading from the input or similar. ++ /// ++ /// By default data this option is not set, and this corresponds to the ++ /// `CURLOPT_SEEKFUNCTION` and `CURLOPT_SEEKDATA` options. ++ /// ++ /// Note that the lifetime bound on this function is `'static`, but that ++ /// is often too restrictive. To use stack data consider calling the ++ /// `transfer` method and then using `seek_function` to configure a ++ /// callback that can reference stack-local data. ++ pub fn seek_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(SeekFrom) -> SeekResult + Send + 'static ++ { ++ self.inner.get_mut().owned.seek = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Callback to progress meter function ++ /// ++ /// This function gets called by libcurl instead of its internal equivalent ++ /// with a frequent interval. While data is being transferred it will be ++ /// called very frequently, and during slow periods like when nothing is ++ /// being transferred it can slow down to about one call per second. 
++ /// ++ /// The callback gets told how much data libcurl will transfer and has ++ /// transferred, in number of bytes. The first argument is the total number ++ /// of bytes libcurl expects to download in this transfer. The second ++ /// argument is the number of bytes downloaded so far. The third argument is ++ /// the total number of bytes libcurl expects to upload in this transfer. ++ /// The fourth argument is the number of bytes uploaded so far. ++ /// ++ /// Unknown/unused argument values passed to the callback will be set to ++ /// zero (like if you only download data, the upload size will remain 0). ++ /// Many times the callback will be called one or more times first, before ++ /// it knows the data sizes so a program must be made to handle that. ++ /// ++ /// Returning `false` from this callback will cause libcurl to abort the ++ /// transfer and return `is_aborted_by_callback`. ++ /// ++ /// If you transfer data with the multi interface, this function will not be ++ /// called during periods of idleness unless you call the appropriate ++ /// libcurl function that performs transfers. ++ /// ++ /// `progress` must be set to `true` to make this function actually get ++ /// called. ++ /// ++ /// By default this function calls an internal method and corresponds to ++ /// `CURLOPT_PROGRESSFUNCTION` and `CURLOPT_PROGRESSDATA`. ++ /// ++ /// Note that the lifetime bound on this function is `'static`, but that ++ /// is often too restrictive. To use stack data consider calling the ++ /// `transfer` method and then using `progress_function` to configure a ++ /// callback that can reference stack-local data. 
++ pub fn progress_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(f64, f64, f64, f64) -> bool + Send + 'static ++ { ++ self.inner.get_mut().owned.progress = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Callback to SSL context ++ /// ++ /// This callback function gets called by libcurl just before the ++ /// initialization of an SSL connection after having processed all ++ /// other SSL related options to give a last chance to an ++ /// application to modify the behaviour of the SSL ++ /// initialization. The `ssl_ctx` parameter is actually a pointer ++ /// to the SSL library's SSL_CTX. If an error is returned from the ++ /// callback no attempt to establish a connection is made and the ++ /// perform operation will return the callback's error code. ++ /// ++ /// This function will get called on all new connections made to a ++ /// server, during the SSL negotiation. The SSL_CTX pointer will ++ /// be a new one every time. ++ /// ++ /// To use this properly, a non-trivial amount of knowledge of ++ /// your SSL library is necessary. For example, you can use this ++ /// function to call library-specific callbacks to add additional ++ /// validation code for certificates, and even to change the ++ /// actual URI of a HTTPS request. ++ /// ++ /// By default this function calls an internal method and ++ /// corresponds to `CURLOPT_SSL_CTX_FUNCTION` and ++ /// `CURLOPT_SSL_CTX_DATA`. ++ /// ++ /// Note that the lifetime bound on this function is `'static`, but that ++ /// is often too restrictive. To use stack data consider calling the ++ /// `transfer` method and then using `progress_function` to configure a ++ /// callback that can reference stack-local data. 
++ pub fn ssl_ctx_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(*mut c_void) -> Result<(), Error> + Send + 'static ++ { ++ self.inner.get_mut().owned.ssl_ctx = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Specify a debug callback ++ /// ++ /// `debug_function` replaces the standard debug function used when ++ /// `verbose` is in effect. This callback receives debug information, ++ /// as specified in the type argument. ++ /// ++ /// By default this option is not set and corresponds to the ++ /// `CURLOPT_DEBUGFUNCTION` and `CURLOPT_DEBUGDATA` options. ++ /// ++ /// Note that the lifetime bound on this function is `'static`, but that ++ /// is often too restrictive. To use stack data consider calling the ++ /// `transfer` method and then using `debug_function` to configure a ++ /// callback that can reference stack-local data. ++ pub fn debug_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(InfoType, &[u8]) + Send + 'static ++ { ++ self.inner.get_mut().owned.debug = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Callback that receives header data ++ /// ++ /// This function gets called by libcurl as soon as it has received header ++ /// data. The header callback will be called once for each header and only ++ /// complete header lines are passed on to the callback. Parsing headers is ++ /// very easy using this. If this callback returns `false` it'll signal an ++ /// error to the library. This will cause the transfer to get aborted and ++ /// the libcurl function in progress will return `is_write_error`. ++ /// ++ /// A complete HTTP header that is passed to this function can be up to ++ /// CURL_MAX_HTTP_HEADER (100K) bytes. ++ /// ++ /// It's important to note that the callback will be invoked for the headers ++ /// of all responses received after initiating a request and not just the ++ /// final response. This includes all responses which occur during ++ /// authentication negotiation. 
If you need to operate on only the headers ++ /// from the final response, you will need to collect headers in the ++ /// callback yourself and use HTTP status lines, for example, to delimit ++ /// response boundaries. ++ /// ++ /// When a server sends a chunked encoded transfer, it may contain a ++ /// trailer. That trailer is identical to a HTTP header and if such a ++ /// trailer is received it is passed to the application using this callback ++ /// as well. There are several ways to detect it being a trailer and not an ++ /// ordinary header: 1) it comes after the response-body. 2) it comes after ++ /// the final header line (CR LF) 3) a Trailer: header among the regular ++ /// response-headers mention what header(s) to expect in the trailer. ++ /// ++ /// For non-HTTP protocols like FTP, POP3, IMAP and SMTP this function will ++ /// get called with the server responses to the commands that libcurl sends. ++ /// ++ /// By default this option is not set and corresponds to the ++ /// `CURLOPT_HEADERFUNCTION` and `CURLOPT_HEADERDATA` options. ++ /// ++ /// Note that the lifetime bound on this function is `'static`, but that ++ /// is often too restrictive. To use stack data consider calling the ++ /// `transfer` method and then using `header_function` to configure a ++ /// callback that can reference stack-local data. 
++ /// ++ /// # Examples ++ /// ++ /// ``` ++ /// use std::str; ++ /// ++ /// use curl::easy::Easy; ++ /// ++ /// let mut handle = Easy::new(); ++ /// handle.url("https://www.rust-lang.org/").unwrap(); ++ /// handle.header_function(|header| { ++ /// print!("header: {}", str::from_utf8(header).unwrap()); ++ /// true ++ /// }).unwrap(); ++ /// handle.perform().unwrap(); ++ /// ``` ++ /// ++ /// Collecting headers to a stack local vector ++ /// ++ /// ``` ++ /// use std::str; ++ /// ++ /// use curl::easy::Easy; ++ /// ++ /// let mut headers = Vec::new(); ++ /// let mut handle = Easy::new(); ++ /// handle.url("https://www.rust-lang.org/").unwrap(); ++ /// ++ /// { ++ /// let mut transfer = handle.transfer(); ++ /// transfer.header_function(|header| { ++ /// headers.push(str::from_utf8(header).unwrap().to_string()); ++ /// true ++ /// }).unwrap(); ++ /// transfer.perform().unwrap(); ++ /// } ++ /// ++ /// println!("{:?}", headers); ++ /// ``` ++ pub fn header_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(&[u8]) -> bool + Send + 'static ++ { ++ self.inner.get_mut().owned.header = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ // ========================================================================= ++ // Error options ++ ++ // TODO: error buffer and stderr ++ ++ /// Same as [`Easy2::fail_on_error`](struct.Easy2.html#method.fail_on_error) ++ pub fn fail_on_error(&mut self, fail: bool) -> Result<(), Error> { ++ self.inner.fail_on_error(fail) ++ } ++ ++ // ========================================================================= ++ // Network options ++ ++ /// Same as [`Easy2::url`](struct.Easy2.html#method.url) ++ pub fn url(&mut self, url: &str) -> Result<(), Error> { ++ self.inner.url(url) ++ } ++ ++ /// Same as [`Easy2::port`](struct.Easy2.html#method.port) ++ pub fn port(&mut self, port: u16) -> Result<(), Error> { ++ self.inner.port(port) ++ } ++ ++ /// Same as [`Easy2::proxy`](struct.Easy2.html#method.proxy) ++ pub fn proxy(&mut self, url: &str) -> 
Result<(), Error> { ++ self.inner.proxy(url) ++ } ++ ++ /// Same as [`Easy2::proxy_port`](struct.Easy2.html#method.proxy_port) ++ pub fn proxy_port(&mut self, port: u16) -> Result<(), Error> { ++ self.inner.proxy_port(port) ++ } ++ ++ /// Same as [`Easy2::proxy_type`](struct.Easy2.html#method.proxy_type) ++ pub fn proxy_type(&mut self, kind: ProxyType) -> Result<(), Error> { ++ self.inner.proxy_type(kind) ++ } ++ ++ /// Same as [`Easy2::noproxy`](struct.Easy2.html#method.noproxy) ++ pub fn noproxy(&mut self, skip: &str) -> Result<(), Error> { ++ self.inner.noproxy(skip) ++ } ++ ++ /// Same as [`Easy2::http_proxy_tunnel`](struct.Easy2.html#method.http_proxy_tunnel) ++ pub fn http_proxy_tunnel(&mut self, tunnel: bool) -> Result<(), Error> { ++ self.inner.http_proxy_tunnel(tunnel) ++ } ++ ++ /// Same as [`Easy2::interface`](struct.Easy2.html#method.interface) ++ pub fn interface(&mut self, interface: &str) -> Result<(), Error> { ++ self.inner.interface(interface) ++ } ++ ++ /// Same as [`Easy2::set_local_port`](struct.Easy2.html#method.set_local_port) ++ pub fn set_local_port(&mut self, port: u16) -> Result<(), Error> { ++ self.inner.set_local_port(port) ++ } ++ ++ /// Same as [`Easy2::local_port_range`](struct.Easy2.html#method.local_port_range) ++ pub fn local_port_range(&mut self, range: u16) -> Result<(), Error> { ++ self.inner.local_port_range(range) ++ } ++ ++ /// Same as [`Easy2::dns_cache_timeout`](struct.Easy2.html#method.dns_cache_timeout) ++ pub fn dns_cache_timeout(&mut self, dur: Duration) -> Result<(), Error> { ++ self.inner.dns_cache_timeout(dur) ++ } ++ ++ /// Same as [`Easy2::buffer_size`](struct.Easy2.html#method.buffer_size) ++ pub fn buffer_size(&mut self, size: usize) -> Result<(), Error> { ++ self.inner.buffer_size(size) ++ } ++ ++ /// Same as [`Easy2::tcp_nodelay`](struct.Easy2.html#method.tcp_nodelay) ++ pub fn tcp_nodelay(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.tcp_nodelay(enable) ++ } ++ ++ /// Same as 
[`Easy2::tcp_keepalive`](struct.Easy2.html#method.tcp_keepalive) ++ pub fn tcp_keepalive(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.tcp_keepalive(enable) ++ } ++ ++ /// Same as [`Easy2::tcp_keepintvl`](struct.Easy2.html#method.tcp_keepalive) ++ pub fn tcp_keepintvl(&mut self, dur: Duration) -> Result<(), Error> { ++ self.inner.tcp_keepintvl(dur) ++ } ++ ++ /// Same as [`Easy2::tcp_keepidle`](struct.Easy2.html#method.tcp_keepidle) ++ pub fn tcp_keepidle(&mut self, dur: Duration) -> Result<(), Error> { ++ self.inner.tcp_keepidle(dur) ++ } ++ ++ /// Same as [`Easy2::address_scope`](struct.Easy2.html#method.address_scope) ++ pub fn address_scope(&mut self, scope: u32) -> Result<(), Error> { ++ self.inner.address_scope(scope) ++ } ++ ++ // ========================================================================= ++ // Names and passwords ++ ++ /// Same as [`Easy2::username`](struct.Easy2.html#method.username) ++ pub fn username(&mut self, user: &str) -> Result<(), Error> { ++ self.inner.username(user) ++ } ++ ++ /// Same as [`Easy2::password`](struct.Easy2.html#method.password) ++ pub fn password(&mut self, pass: &str) -> Result<(), Error> { ++ self.inner.password(pass) ++ } ++ ++ /// Same as [`Easy2::http_auth`](struct.Easy2.html#method.http_auth) ++ pub fn http_auth(&mut self, auth: &Auth) -> Result<(), Error> { ++ self.inner.http_auth(auth) ++ } ++ ++ /// Same as [`Easy2::proxy_username`](struct.Easy2.html#method.proxy_username) ++ pub fn proxy_username(&mut self, user: &str) -> Result<(), Error> { ++ self.inner.proxy_username(user) ++ } ++ ++ /// Same as [`Easy2::proxy_password`](struct.Easy2.html#method.proxy_password) ++ pub fn proxy_password(&mut self, pass: &str) -> Result<(), Error> { ++ self.inner.proxy_password(pass) ++ } ++ ++ /// Same as [`Easy2::proxy_auth`](struct.Easy2.html#method.proxy_auth) ++ pub fn proxy_auth(&mut self, auth: &Auth) -> Result<(), Error> { ++ self.inner.proxy_auth(auth) ++ } ++ ++ /// Same as 
[`Easy2::netrc`](struct.Easy2.html#method.netrc) ++ pub fn netrc(&mut self, netrc: NetRc) -> Result<(), Error> { ++ self.inner.netrc(netrc) ++ } ++ ++ // ========================================================================= ++ // HTTP Options ++ ++ /// Same as [`Easy2::autoreferer`](struct.Easy2.html#method.autoreferer) ++ pub fn autoreferer(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.autoreferer(enable) ++ } ++ ++ /// Same as [`Easy2::accept_encoding`](struct.Easy2.html#method.accept_encoding) ++ pub fn accept_encoding(&mut self, encoding: &str) -> Result<(), Error> { ++ self.inner.accept_encoding(encoding) ++ } ++ ++ /// Same as [`Easy2::transfer_encoding`](struct.Easy2.html#method.transfer_encoding) ++ pub fn transfer_encoding(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.transfer_encoding(enable) ++ } ++ ++ /// Same as [`Easy2::follow_location`](struct.Easy2.html#method.follow_location) ++ pub fn follow_location(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.follow_location(enable) ++ } ++ ++ /// Same as [`Easy2::unrestricted_auth`](struct.Easy2.html#method.unrestricted_auth) ++ pub fn unrestricted_auth(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.unrestricted_auth(enable) ++ } ++ ++ /// Same as [`Easy2::max_redirections`](struct.Easy2.html#method.max_redirections) ++ pub fn max_redirections(&mut self, max: u32) -> Result<(), Error> { ++ self.inner.max_redirections(max) ++ } ++ ++ /// Same as [`Easy2::put`](struct.Easy2.html#method.put) ++ pub fn put(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.put(enable) ++ } ++ ++ /// Same as [`Easy2::post`](struct.Easy2.html#method.post) ++ pub fn post(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.post(enable) ++ } ++ ++ /// Same as [`Easy2::post_field_copy`](struct.Easy2.html#method.post_field_copy) ++ pub fn post_fields_copy(&mut self, data: &[u8]) -> Result<(), Error> { ++ self.inner.post_fields_copy(data) ++ } ++ ++ 
/// Same as [`Easy2::post_field_size`](struct.Easy2.html#method.post_field_size) ++ pub fn post_field_size(&mut self, size: u64) -> Result<(), Error> { ++ self.inner.post_field_size(size) ++ } ++ ++ /// Same as [`Easy2::httppost`](struct.Easy2.html#method.httppost) ++ pub fn httppost(&mut self, form: Form) -> Result<(), Error> { ++ self.inner.httppost(form) ++ } ++ ++ /// Same as [`Easy2::referer`](struct.Easy2.html#method.referer) ++ pub fn referer(&mut self, referer: &str) -> Result<(), Error> { ++ self.inner.referer(referer) ++ } ++ ++ /// Same as [`Easy2::useragent`](struct.Easy2.html#method.useragent) ++ pub fn useragent(&mut self, useragent: &str) -> Result<(), Error> { ++ self.inner.useragent(useragent) ++ } ++ ++ /// Same as [`Easy2::http_headers`](struct.Easy2.html#method.http_headers) ++ pub fn http_headers(&mut self, list: List) -> Result<(), Error> { ++ self.inner.http_headers(list) ++ } ++ ++ /// Same as [`Easy2::cookie`](struct.Easy2.html#method.cookie) ++ pub fn cookie(&mut self, cookie: &str) -> Result<(), Error> { ++ self.inner.cookie(cookie) ++ } ++ ++ /// Same as [`Easy2::cookie_file`](struct.Easy2.html#method.cookie_file) ++ pub fn cookie_file>(&mut self, file: P) -> Result<(), Error> { ++ self.inner.cookie_file(file) ++ } ++ ++ /// Same as [`Easy2::cookie_jar`](struct.Easy2.html#method.cookie_jar) ++ pub fn cookie_jar>(&mut self, file: P) -> Result<(), Error> { ++ self.inner.cookie_jar(file) ++ } ++ ++ /// Same as [`Easy2::cookie_session`](struct.Easy2.html#method.cookie_session) ++ pub fn cookie_session(&mut self, session: bool) -> Result<(), Error> { ++ self.inner.cookie_session(session) ++ } ++ ++ /// Same as [`Easy2::cookie_list`](struct.Easy2.html#method.cookie_list) ++ pub fn cookie_list(&mut self, cookie: &str) -> Result<(), Error> { ++ self.inner.cookie_list(cookie) ++ } ++ ++ /// Same as [`Easy2::get`](struct.Easy2.html#method.get) ++ pub fn get(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.get(enable) ++ } ++ ++ /// 
Same as [`Easy2::ignore_content_length`](struct.Easy2.html#method.ignore_content_length) ++ pub fn ignore_content_length(&mut self, ignore: bool) -> Result<(), Error> { ++ self.inner.ignore_content_length(ignore) ++ } ++ ++ /// Same as [`Easy2::http_content_decoding`](struct.Easy2.html#method.http_content_decoding) ++ pub fn http_content_decoding(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.http_content_decoding(enable) ++ } ++ ++ /// Same as [`Easy2::http_transfer_decoding`](struct.Easy2.html#method.http_transfer_decoding) ++ pub fn http_transfer_decoding(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.http_transfer_decoding(enable) ++ } ++ ++ // ========================================================================= ++ // Protocol Options ++ ++ /// Same as [`Easy2::range`](struct.Easy2.html#method.range) ++ pub fn range(&mut self, range: &str) -> Result<(), Error> { ++ self.inner.range(range) ++ } ++ ++ /// Same as [`Easy2::resume_from`](struct.Easy2.html#method.resume_from) ++ pub fn resume_from(&mut self, from: u64) -> Result<(), Error> { ++ self.inner.resume_from(from) ++ } ++ ++ /// Same as [`Easy2::custom_request`](struct.Easy2.html#method.custom_request) ++ pub fn custom_request(&mut self, request: &str) -> Result<(), Error> { ++ self.inner.custom_request(request) ++ } ++ ++ /// Same as [`Easy2::fetch_filetime`](struct.Easy2.html#method.fetch_filetime) ++ pub fn fetch_filetime(&mut self, fetch: bool) -> Result<(), Error> { ++ self.inner.fetch_filetime(fetch) ++ } ++ ++ /// Same as [`Easy2::nobody`](struct.Easy2.html#method.nobody) ++ pub fn nobody(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.nobody(enable) ++ } ++ ++ /// Same as [`Easy2::in_filesize`](struct.Easy2.html#method.in_filesize) ++ pub fn in_filesize(&mut self, size: u64) -> Result<(), Error> { ++ self.inner.in_filesize(size) ++ } ++ ++ /// Same as [`Easy2::upload`](struct.Easy2.html#method.upload) ++ pub fn upload(&mut self, enable: bool) -> 
Result<(), Error> { ++ self.inner.upload(enable) ++ } ++ ++ /// Same as [`Easy2::max_filesize`](struct.Easy2.html#method.max_filesize) ++ pub fn max_filesize(&mut self, size: u64) -> Result<(), Error> { ++ self.inner.max_filesize(size) ++ } ++ ++ /// Same as [`Easy2::time_condition`](struct.Easy2.html#method.time_condition) ++ pub fn time_condition(&mut self, cond: TimeCondition) -> Result<(), Error> { ++ self.inner.time_condition(cond) ++ } ++ ++ /// Same as [`Easy2::time_value`](struct.Easy2.html#method.time_value) ++ pub fn time_value(&mut self, val: i64) -> Result<(), Error> { ++ self.inner.time_value(val) ++ } ++ ++ // ========================================================================= ++ // Connection Options ++ ++ /// Same as [`Easy2::timeout`](struct.Easy2.html#method.timeout) ++ pub fn timeout(&mut self, timeout: Duration) -> Result<(), Error> { ++ self.inner.timeout(timeout) ++ } ++ ++ /// Same as [`Easy2::low_speed_limit`](struct.Easy2.html#method.low_speed_limit) ++ pub fn low_speed_limit(&mut self, limit: u32) -> Result<(), Error> { ++ self.inner.low_speed_limit(limit) ++ } ++ ++ /// Same as [`Easy2::low_speed_time`](struct.Easy2.html#method.low_speed_time) ++ pub fn low_speed_time(&mut self, dur: Duration) -> Result<(), Error> { ++ self.inner.low_speed_time(dur) ++ } ++ ++ /// Same as [`Easy2::max_send_speed`](struct.Easy2.html#method.max_send_speed) ++ pub fn max_send_speed(&mut self, speed: u64) -> Result<(), Error> { ++ self.inner.max_send_speed(speed) ++ } ++ ++ /// Same as [`Easy2::max_recv_speed`](struct.Easy2.html#method.max_recv_speed) ++ pub fn max_recv_speed(&mut self, speed: u64) -> Result<(), Error> { ++ self.inner.max_recv_speed(speed) ++ } ++ ++ /// Same as [`Easy2::max_connects`](struct.Easy2.html#method.max_connects) ++ pub fn max_connects(&mut self, max: u32) -> Result<(), Error> { ++ self.inner.max_connects(max) ++ } ++ ++ /// Same as [`Easy2::fresh_connect`](struct.Easy2.html#method.fresh_connect) ++ pub fn fresh_connect(&mut 
self, enable: bool) -> Result<(), Error> { ++ self.inner.fresh_connect(enable) ++ } ++ ++ /// Same as [`Easy2::forbid_reuse`](struct.Easy2.html#method.forbid_reuse) ++ pub fn forbid_reuse(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.forbid_reuse(enable) ++ } ++ ++ /// Same as [`Easy2::connect_timeout`](struct.Easy2.html#method.connect_timeout) ++ pub fn connect_timeout(&mut self, timeout: Duration) -> Result<(), Error> { ++ self.inner.connect_timeout(timeout) ++ } ++ ++ /// Same as [`Easy2::ip_resolve`](struct.Easy2.html#method.ip_resolve) ++ pub fn ip_resolve(&mut self, resolve: IpResolve) -> Result<(), Error> { ++ self.inner.ip_resolve(resolve) ++ } ++ ++ /// Same as [`Easy2::connect_only`](struct.Easy2.html#method.connect_only) ++ pub fn connect_only(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.connect_only(enable) ++ } ++ ++ // ========================================================================= ++ // SSL/Security Options ++ ++ /// Same as [`Easy2::ssl_cert`](struct.Easy2.html#method.ssl_cert) ++ pub fn ssl_cert>(&mut self, cert: P) -> Result<(), Error> { ++ self.inner.ssl_cert(cert) ++ } ++ ++ /// Same as [`Easy2::ssl_cert_type`](struct.Easy2.html#method.ssl_cert_type) ++ pub fn ssl_cert_type(&mut self, kind: &str) -> Result<(), Error> { ++ self.inner.ssl_cert_type(kind) ++ } ++ ++ /// Same as [`Easy2::ssl_key`](struct.Easy2.html#method.ssl_key) ++ pub fn ssl_key>(&mut self, key: P) -> Result<(), Error> { ++ self.inner.ssl_key(key) ++ } ++ ++ /// Same as [`Easy2::ssl_key_type`](struct.Easy2.html#method.ssl_key_type) ++ pub fn ssl_key_type(&mut self, kind: &str) -> Result<(), Error> { ++ self.inner.ssl_key_type(kind) ++ } ++ ++ /// Same as [`Easy2::key_password`](struct.Easy2.html#method.key_password) ++ pub fn key_password(&mut self, password: &str) -> Result<(), Error> { ++ self.inner.key_password(password) ++ } ++ ++ /// Same as [`Easy2::ssl_engine`](struct.Easy2.html#method.ssl_engine) ++ pub fn ssl_engine(&mut self, 
engine: &str) -> Result<(), Error> { ++ self.inner.ssl_engine(engine) ++ } ++ ++ /// Same as [`Easy2::ssl_engine_default`](struct.Easy2.html#method.ssl_engine_default) ++ pub fn ssl_engine_default(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.ssl_engine_default(enable) ++ } ++ ++ /// Same as [`Easy2::http_version`](struct.Easy2.html#method.http_version) ++ pub fn http_version(&mut self, version: HttpVersion) -> Result<(), Error> { ++ self.inner.http_version(version) ++ } ++ ++ /// Same as [`Easy2::ssl_version`](struct.Easy2.html#method.ssl_version) ++ pub fn ssl_version(&mut self, version: SslVersion) -> Result<(), Error> { ++ self.inner.ssl_version(version) ++ } ++ ++ /// Same as [`Easy2::ssl_verify_host`](struct.Easy2.html#method.ssl_verify_host) ++ pub fn ssl_verify_host(&mut self, verify: bool) -> Result<(), Error> { ++ self.inner.ssl_verify_host(verify) ++ } ++ ++ /// Same as [`Easy2::ssl_verify_peer`](struct.Easy2.html#method.ssl_verify_peer) ++ pub fn ssl_verify_peer(&mut self, verify: bool) -> Result<(), Error> { ++ self.inner.ssl_verify_peer(verify) ++ } ++ ++ /// Same as [`Easy2::cainfo`](struct.Easy2.html#method.cainfo) ++ pub fn cainfo>(&mut self, path: P) -> Result<(), Error> { ++ self.inner.cainfo(path) ++ } ++ ++ /// Same as [`Easy2::issuer_cert`](struct.Easy2.html#method.issuer_cert) ++ pub fn issuer_cert>(&mut self, path: P) -> Result<(), Error> { ++ self.inner.issuer_cert(path) ++ } ++ ++ /// Same as [`Easy2::capath`](struct.Easy2.html#method.capath) ++ pub fn capath>(&mut self, path: P) -> Result<(), Error> { ++ self.inner.capath(path) ++ } ++ ++ /// Same as [`Easy2::crlfile`](struct.Easy2.html#method.crlfile) ++ pub fn crlfile>(&mut self, path: P) -> Result<(), Error> { ++ self.inner.crlfile(path) ++ } ++ ++ /// Same as [`Easy2::certinfo`](struct.Easy2.html#method.certinfo) ++ pub fn certinfo(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.certinfo(enable) ++ } ++ ++ /// Same as 
[`Easy2::random_file`](struct.Easy2.html#method.random_file) ++ pub fn random_file>(&mut self, p: P) -> Result<(), Error> { ++ self.inner.random_file(p) ++ } ++ ++ /// Same as [`Easy2::egd_socket`](struct.Easy2.html#method.egd_socket) ++ pub fn egd_socket>(&mut self, p: P) -> Result<(), Error> { ++ self.inner.egd_socket(p) ++ } ++ ++ /// Same as [`Easy2::ssl_cipher_list`](struct.Easy2.html#method.ssl_cipher_list) ++ pub fn ssl_cipher_list(&mut self, ciphers: &str) -> Result<(), Error> { ++ self.inner.ssl_cipher_list(ciphers) ++ } ++ ++ /// Same as [`Easy2::ssl_sessionid_cache`](struct.Easy2.html#method.ssl_sessionid_cache) ++ pub fn ssl_sessionid_cache(&mut self, enable: bool) -> Result<(), Error> { ++ self.inner.ssl_sessionid_cache(enable) ++ } ++ ++ /// Same as [`Easy2::ssl_options`](struct.Easy2.html#method.ssl_options) ++ pub fn ssl_options(&mut self, bits: &SslOpt) -> Result<(), Error> { ++ self.inner.ssl_options(bits) ++ } ++ ++ // ========================================================================= ++ // getters ++ ++ /// Same as [`Easy2::effective_url`](struct.Easy2.html#method.effective_url) ++ pub fn effective_url(&mut self) -> Result, Error> { ++ self.inner.effective_url() ++ } ++ ++ /// Same as [`Easy2::effective_url_bytes`](struct.Easy2.html#method.effective_url_bytes) ++ pub fn effective_url_bytes(&mut self) -> Result, Error> { ++ self.inner.effective_url_bytes() ++ } ++ ++ /// Same as [`Easy2::response_code`](struct.Easy2.html#method.response_code) ++ pub fn response_code(&mut self) -> Result { ++ self.inner.response_code() ++ } ++ ++ /// Same as [`Easy2::http_connectcode`](struct.Easy2.html#method.http_connectcode) ++ pub fn http_connectcode(&mut self) -> Result { ++ self.inner.http_connectcode() ++ } ++ ++ /// Same as [`Easy2::filetime`](struct.Easy2.html#method.filetime) ++ pub fn filetime(&mut self) -> Result, Error> { ++ self.inner.filetime() ++ } ++ ++ /// Same as [`Easy2::total_time`](struct.Easy2.html#method.total_time) ++ pub fn 
total_time(&mut self) -> Result { ++ self.inner.total_time() ++ } ++ ++ /// Same as [`Easy2::namelookup_time`](struct.Easy2.html#method.namelookup_time) ++ pub fn namelookup_time(&mut self) -> Result { ++ self.inner.namelookup_time() ++ } ++ ++ /// Same as [`Easy2::connect_time`](struct.Easy2.html#method.connect_time) ++ pub fn connect_time(&mut self) -> Result { ++ self.inner.connect_time() ++ } ++ ++ /// Same as [`Easy2::appconnect_time`](struct.Easy2.html#method.appconnect_time) ++ pub fn appconnect_time(&mut self) -> Result { ++ self.inner.appconnect_time() ++ } ++ ++ /// Same as [`Easy2::pretransfer_time`](struct.Easy2.html#method.pretransfer_time) ++ pub fn pretransfer_time(&mut self) -> Result { ++ self.inner.pretransfer_time() ++ } ++ ++ /// Same as [`Easy2::starttransfer_time`](struct.Easy2.html#method.starttransfer_time) ++ pub fn starttransfer_time(&mut self) -> Result { ++ self.inner.starttransfer_time() ++ } ++ ++ /// Same as [`Easy2::redirect_time`](struct.Easy2.html#method.redirect_time) ++ pub fn redirect_time(&mut self) -> Result { ++ self.inner.redirect_time() ++ } ++ ++ /// Same as [`Easy2::redirect_count`](struct.Easy2.html#method.redirect_count) ++ pub fn redirect_count(&mut self) -> Result { ++ self.inner.redirect_count() ++ } ++ ++ /// Same as [`Easy2::redirect_url`](struct.Easy2.html#method.redirect_url) ++ pub fn redirect_url(&mut self) -> Result, Error> { ++ self.inner.redirect_url() ++ } ++ ++ /// Same as [`Easy2::redirect_url_bytes`](struct.Easy2.html#method.redirect_url_bytes) ++ pub fn redirect_url_bytes(&mut self) -> Result, Error> { ++ self.inner.redirect_url_bytes() ++ } ++ ++ /// Same as [`Easy2::header_size`](struct.Easy2.html#method.header_size) ++ pub fn header_size(&mut self) -> Result { ++ self.inner.header_size() ++ } ++ ++ /// Same as [`Easy2::request_size`](struct.Easy2.html#method.request_size) ++ pub fn request_size(&mut self) -> Result { ++ self.inner.request_size() ++ } ++ ++ /// Same as 
[`Easy2::content_type`](struct.Easy2.html#method.content_type) ++ pub fn content_type(&mut self) -> Result, Error> { ++ self.inner.content_type() ++ } ++ ++ /// Same as [`Easy2::content_type_bytes`](struct.Easy2.html#method.content_type_bytes) ++ pub fn content_type_bytes(&mut self) -> Result, Error> { ++ self.inner.content_type_bytes() ++ } ++ ++ /// Same as [`Easy2::os_errno`](struct.Easy2.html#method.os_errno) ++ pub fn os_errno(&mut self) -> Result { ++ self.inner.os_errno() ++ } ++ ++ /// Same as [`Easy2::primary_ip`](struct.Easy2.html#method.primary_ip) ++ pub fn primary_ip(&mut self) -> Result, Error> { ++ self.inner.primary_ip() ++ } ++ ++ /// Same as [`Easy2::primary_port`](struct.Easy2.html#method.primary_port) ++ pub fn primary_port(&mut self) -> Result { ++ self.inner.primary_port() ++ } ++ ++ /// Same as [`Easy2::local_ip`](struct.Easy2.html#method.local_ip) ++ pub fn local_ip(&mut self) -> Result, Error> { ++ self.inner.local_ip() ++ } ++ ++ /// Same as [`Easy2::local_port`](struct.Easy2.html#method.local_port) ++ pub fn local_port(&mut self) -> Result { ++ self.inner.local_port() ++ } ++ ++ /// Same as [`Easy2::cookies`](struct.Easy2.html#method.cookies) ++ pub fn cookies(&mut self) -> Result { ++ self.inner.cookies() ++ } ++ ++ // ========================================================================= ++ // Other methods ++ ++ /// Same as [`Easy2::perform`](struct.Easy2.html#method.perform) ++ pub fn perform(&self) -> Result<(), Error> { ++ assert!(self.inner.get_ref().borrowed.get().is_null()); ++ self.do_perform() ++ } ++ ++ fn do_perform(&self) -> Result<(), Error> { ++ // We don't allow recursive invocations of `perform` because we're ++ // invoking `FnMut`closures behind a `&self` pointer. This flag acts as ++ // our own `RefCell` borrow flag sorta. 
++ if self.inner.get_ref().running.get() { ++ return Err(Error::new(curl_sys::CURLE_FAILED_INIT)) ++ } ++ ++ self.inner.get_ref().running.set(true); ++ struct Reset<'a>(&'a Cell); ++ impl<'a> Drop for Reset<'a> { ++ fn drop(&mut self) { ++ self.0.set(false); ++ } ++ } ++ let _reset = Reset(&self.inner.get_ref().running); ++ ++ self.inner.perform() ++ } ++ ++ /// Creates a new scoped transfer which can be used to set callbacks and ++ /// data which only live for the scope of the returned object. ++ /// ++ /// An `Easy` handle is often reused between different requests to cache ++ /// connections to servers, but often the lifetime of the data as part of ++ /// each transfer is unique. This function serves as an ability to share an ++ /// `Easy` across many transfers while ergonomically using possibly ++ /// stack-local data as part of each transfer. ++ /// ++ /// Configuration can be set on the `Easy` and then a `Transfer` can be ++ /// created to set scoped configuration (like callbacks). Finally, the ++ /// `perform` method on the `Transfer` function can be used. ++ /// ++ /// When the `Transfer` option is dropped then all configuration set on the ++ /// transfer itself will be reset. 
++ pub fn transfer<'data, 'easy>(&'easy mut self) -> Transfer<'easy, 'data> { ++ assert!(!self.inner.get_ref().running.get()); ++ Transfer { ++ data: Box::new(Callbacks::default()), ++ easy: self, ++ } ++ } ++ ++ /// Same as [`Easy2::unpause_read`](struct.Easy2.html#method.unpause_read) ++ pub fn unpause_read(&self) -> Result<(), Error> { ++ self.inner.unpause_read() ++ } ++ ++ /// Same as [`Easy2::unpause_write`](struct.Easy2.html#method.unpause_write) ++ pub fn unpause_write(&self) -> Result<(), Error> { ++ self.inner.unpause_write() ++ } ++ ++ /// Same as [`Easy2::url_encode`](struct.Easy2.html#method.url_encode) ++ pub fn url_encode(&mut self, s: &[u8]) -> String { ++ self.inner.url_encode(s) ++ } ++ ++ /// Same as [`Easy2::url_decode`](struct.Easy2.html#method.url_decode) ++ pub fn url_decode(&mut self, s: &str) -> Vec { ++ self.inner.url_decode(s) ++ } ++ ++ /// Same as [`Easy2::reset`](struct.Easy2.html#method.reset) ++ pub fn reset(&mut self) { ++ self.inner.reset() ++ } ++ ++ /// Same as [`Easy2::recv`](struct.Easy2.html#method.recv) ++ pub fn recv(&mut self, data: &mut [u8]) -> Result { ++ self.inner.recv(data) ++ } ++ ++ /// Same as [`Easy2::send`](struct.Easy2.html#method.send) ++ pub fn send(&mut self, data: &[u8]) -> Result { ++ self.inner.send(data) ++ } ++ ++ /// Same as [`Easy2::raw`](struct.Easy2.html#method.raw) ++ pub fn raw(&self) -> *mut curl_sys::CURL { ++ self.inner.raw() ++ } ++} ++ ++impl EasyData { ++ /// An unsafe function to get the appropriate callback field. ++ /// ++ /// We can have callbacks configured from one of two different sources. ++ /// We could either have a callback from the `borrowed` field, callbacks on ++ /// an ephemeral `Transfer`, or the `owned` field which are `'static` ++ /// callbacks that live for the lifetime of this `EasyData`. ++ /// ++ /// The first set of callbacks are unsafe to access because they're actually ++ /// owned elsewhere and we're just aliasing. 
Additionally they don't ++ /// technically live long enough for us to access them, so they're hidden ++ /// behind unsafe pointers and casts. ++ /// ++ /// This function returns `&'a mut T` but that's actually somewhat of a lie. ++ /// The value should **not be stored to** nor should it be used for the full ++ /// lifetime of `'a`, but rather immediately in the local scope. ++ /// ++ /// Basically this is just intended to acquire a callback, invoke it, and ++ /// then stop. Nothing else. Super unsafe. ++ unsafe fn callback<'a, T, F>(&'a mut self, f: F) -> Option<&'a mut T> ++ where F: for<'b> Fn(&'b mut Callbacks<'static>) -> &'b mut Option, ++ { ++ let ptr = self.borrowed.get(); ++ if !ptr.is_null() { ++ let val = f(&mut *ptr); ++ if val.is_some() { ++ return val.as_mut() ++ } ++ } ++ f(&mut self.owned).as_mut() ++ } ++} ++ ++impl Handler for EasyData { ++ fn write(&mut self, data: &[u8]) -> Result { ++ unsafe { ++ match self.callback(|s| &mut s.write) { ++ Some(write) => write(data), ++ None => Ok(data.len()), ++ } ++ } ++ } ++ ++ fn read(&mut self, data: &mut [u8]) -> Result { ++ unsafe { ++ match self.callback(|s| &mut s.read) { ++ Some(read) => read(data), ++ None => Ok(0), ++ } ++ } ++ } ++ ++ fn seek(&mut self, whence: SeekFrom) -> SeekResult { ++ unsafe { ++ match self.callback(|s| &mut s.seek) { ++ Some(seek) => seek(whence), ++ None => SeekResult::CantSeek, ++ } ++ } ++ } ++ ++ fn debug(&mut self, kind: InfoType, data: &[u8]) { ++ unsafe { ++ match self.callback(|s| &mut s.debug) { ++ Some(debug) => debug(kind, data), ++ None => handler::debug(kind, data), ++ } ++ } ++ } ++ ++ fn header(&mut self, data: &[u8]) -> bool { ++ unsafe { ++ match self.callback(|s| &mut s.header) { ++ Some(header) => header(data), ++ None => true, ++ } ++ } ++ } ++ ++ fn progress(&mut self, ++ dltotal: f64, ++ dlnow: f64, ++ ultotal: f64, ++ ulnow: f64) -> bool { ++ unsafe { ++ match self.callback(|s| &mut s.progress) { ++ Some(progress) => progress(dltotal, dlnow, ultotal, 
ulnow), ++ None => true, ++ } ++ } ++ } ++ ++ fn ssl_ctx(&mut self, cx: *mut c_void) -> Result<(), Error> { ++ unsafe { ++ match self.callback(|s| &mut s.ssl_ctx) { ++ Some(ssl_ctx) => ssl_ctx(cx), ++ None => Ok(()), ++ } ++ } ++ } ++} ++ ++impl fmt::Debug for EasyData { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ "callbacks ...".fmt(f) ++ } ++} ++ ++impl<'easy, 'data> Transfer<'easy, 'data> { ++ /// Same as `Easy::write_function`, just takes a non `'static` lifetime ++ /// corresponding to the lifetime of this transfer. ++ pub fn write_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(&[u8]) -> Result + 'data ++ { ++ self.data.write = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Same as `Easy::read_function`, just takes a non `'static` lifetime ++ /// corresponding to the lifetime of this transfer. ++ pub fn read_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(&mut [u8]) -> Result + 'data ++ { ++ self.data.read = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Same as `Easy::seek_function`, just takes a non `'static` lifetime ++ /// corresponding to the lifetime of this transfer. ++ pub fn seek_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(SeekFrom) -> SeekResult + 'data ++ { ++ self.data.seek = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Same as `Easy::progress_function`, just takes a non `'static` lifetime ++ /// corresponding to the lifetime of this transfer. ++ pub fn progress_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(f64, f64, f64, f64) -> bool + 'data ++ { ++ self.data.progress = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Same as `Easy::ssl_ctx_function`, just takes a non `'static` ++ /// lifetime corresponding to the lifetime of this transfer. 
++ pub fn ssl_ctx_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(*mut c_void) -> Result<(), Error> + Send + 'data ++ { ++ self.data.ssl_ctx = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Same as `Easy::debug_function`, just takes a non `'static` lifetime ++ /// corresponding to the lifetime of this transfer. ++ pub fn debug_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(InfoType, &[u8]) + 'data ++ { ++ self.data.debug = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Same as `Easy::header_function`, just takes a non `'static` lifetime ++ /// corresponding to the lifetime of this transfer. ++ pub fn header_function(&mut self, f: F) -> Result<(), Error> ++ where F: FnMut(&[u8]) -> bool + 'data ++ { ++ self.data.header = Some(Box::new(f)); ++ Ok(()) ++ } ++ ++ /// Same as `Easy::transfer`. ++ pub fn perform(&self) -> Result<(), Error> { ++ let inner = self.easy.inner.get_ref(); ++ ++ // Note that we're casting a `&self` pointer to a `*mut`, and then ++ // during the invocation of this call we're going to invoke `FnMut` ++ // closures that we ourselves own. ++ // ++ // This should be ok, however, because `do_perform` checks for recursive ++ // invocations of `perform` and disallows them. Our type also isn't ++ // `Sync`. ++ inner.borrowed.set(&*self.data as *const _ as *mut _); ++ ++ // Make sure to reset everything back to the way it was before when ++ // we're done. ++ struct Reset<'a>(&'a Cell<*mut Callbacks<'static>>); ++ impl<'a> Drop for Reset<'a> { ++ fn drop(&mut self) { ++ self.0.set(ptr::null_mut()); ++ } ++ } ++ let _reset = Reset(&inner.borrowed); ++ ++ self.easy.do_perform() ++ } ++ ++ /// Same as `Easy::unpause_read`. 
++ pub fn unpause_read(&self) -> Result<(), Error> { ++ self.easy.unpause_read() ++ } ++ ++ /// Same as `Easy::unpause_write` ++ pub fn unpause_write(&self) -> Result<(), Error> { ++ self.easy.unpause_write() ++ } ++} ++ ++impl<'easy, 'data> fmt::Debug for Transfer<'easy, 'data> { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_struct("Transfer") ++ .field("easy", &self.easy) ++ .finish() ++ } ++} ++ ++impl<'easy, 'data> Drop for Transfer<'easy, 'data> { ++ fn drop(&mut self) { ++ // Extra double check to make sure we don't leak a pointer to ourselves. ++ assert!(self.easy.inner.get_ref().borrowed.get().is_null()); ++ } ++} diff --cc vendor/curl-0.4.8/src/easy/handler.rs index 000000000,000000000..2556fc82d new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/src/easy/handler.rs @@@ -1,0 -1,0 +1,3108 @@@ ++use std::cell::RefCell; ++use std::ffi::{CStr, CString}; ++use std::fmt; ++use std::io::{self, SeekFrom, Write}; ++use std::path::Path; ++use std::slice; ++use std::str; ++use std::time::Duration; ++ ++use curl_sys; ++use libc::{self, c_void, c_char, c_long, size_t, c_int, c_double, c_ulong}; ++use socket2::Socket; ++ ++use Error; ++use easy::form; ++use easy::list; ++use easy::{List, Form}; ++use panic; ++ ++/// A trait for the various callbacks used by libcurl to invoke user code. ++/// ++/// This trait represents all operations that libcurl can possibly invoke a ++/// client for code during an HTTP transaction. Each callback has a default ++/// "noop" implementation, the same as in libcurl. Types implementing this trait ++/// may simply override the relevant functions to learn about the callbacks ++/// they're interested in. 
++/// ++/// # Examples ++/// ++/// ``` ++/// use curl::easy::{Easy2, Handler, WriteError}; ++/// ++/// struct Collector(Vec); ++/// ++/// impl Handler for Collector { ++/// fn write(&mut self, data: &[u8]) -> Result { ++/// self.0.extend_from_slice(data); ++/// Ok(data.len()) ++/// } ++/// } ++/// ++/// let mut easy = Easy2::new(Collector(Vec::new())); ++/// easy.get(true).unwrap(); ++/// easy.url("https://www.rust-lang.org/").unwrap(); ++/// easy.perform().unwrap(); ++/// ++/// assert_eq!(easy.response_code().unwrap(), 200); ++/// let contents = easy.get_ref(); ++/// println!("{}", String::from_utf8_lossy(&contents.0)); ++/// ``` ++pub trait Handler { ++ /// Callback invoked whenever curl has downloaded data for the application. ++ /// ++ /// This callback function gets called by libcurl as soon as there is data ++ /// received that needs to be saved. ++ /// ++ /// The callback function will be passed as much data as possible in all ++ /// invokes, but you must not make any assumptions. It may be one byte, it ++ /// may be thousands. If `show_header` is enabled, which makes header data ++ /// get passed to the write callback, you can get up to ++ /// `CURL_MAX_HTTP_HEADER` bytes of header data passed into it. This ++ /// usually means 100K. ++ /// ++ /// This function may be called with zero bytes data if the transferred file ++ /// is empty. ++ /// ++ /// The callback should return the number of bytes actually taken care of. ++ /// If that amount differs from the amount passed to your callback function, ++ /// it'll signal an error condition to the library. This will cause the ++ /// transfer to get aborted and the libcurl function used will return ++ /// an error with `is_write_error`. ++ /// ++ /// If your callback function returns `Err(WriteError::Pause)` it will cause ++ /// this transfer to become paused. See `unpause_write` for further details. 
++ /// ++ /// By default data is sent into the void, and this corresponds to the ++ /// `CURLOPT_WRITEFUNCTION` and `CURLOPT_WRITEDATA` options. ++ fn write(&mut self, data: &[u8]) -> Result { ++ Ok(data.len()) ++ } ++ ++ /// Read callback for data uploads. ++ /// ++ /// This callback function gets called by libcurl as soon as it needs to ++ /// read data in order to send it to the peer - like if you ask it to upload ++ /// or post data to the server. ++ /// ++ /// Your function must then return the actual number of bytes that it stored ++ /// in that memory area. Returning 0 will signal end-of-file to the library ++ /// and cause it to stop the current transfer. ++ /// ++ /// If you stop the current transfer by returning 0 "pre-maturely" (i.e ++ /// before the server expected it, like when you've said you will upload N ++ /// bytes and you upload less than N bytes), you may experience that the ++ /// server "hangs" waiting for the rest of the data that won't come. ++ /// ++ /// The read callback may return `Err(ReadError::Abort)` to stop the ++ /// current operation immediately, resulting in a `is_aborted_by_callback` ++ /// error code from the transfer. ++ /// ++ /// The callback can return `Err(ReadError::Pause)` to cause reading from ++ /// this connection to pause. See `unpause_read` for further details. ++ /// ++ /// By default data not input, and this corresponds to the ++ /// `CURLOPT_READFUNCTION` and `CURLOPT_READDATA` options. ++ /// ++ /// Note that the lifetime bound on this function is `'static`, but that ++ /// is often too restrictive. To use stack data consider calling the ++ /// `transfer` method and then using `read_function` to configure a ++ /// callback that can reference stack-local data. ++ fn read(&mut self, data: &mut [u8]) -> Result { ++ drop(data); ++ Ok(0) ++ } ++ ++ /// User callback for seeking in input stream. 
++ /// ++ /// This function gets called by libcurl to seek to a certain position in ++ /// the input stream and can be used to fast forward a file in a resumed ++ /// upload (instead of reading all uploaded bytes with the normal read ++ /// function/callback). It is also called to rewind a stream when data has ++ /// already been sent to the server and needs to be sent again. This may ++ /// happen when doing a HTTP PUT or POST with a multi-pass authentication ++ /// method, or when an existing HTTP connection is reused too late and the ++ /// server closes the connection. ++ /// ++ /// The callback function must return `SeekResult::Ok` on success, ++ /// `SeekResult::Fail` to cause the upload operation to fail or ++ /// `SeekResult::CantSeek` to indicate that while the seek failed, libcurl ++ /// is free to work around the problem if possible. The latter can sometimes ++ /// be done by instead reading from the input or similar. ++ /// ++ /// By default data this option is not set, and this corresponds to the ++ /// `CURLOPT_SEEKFUNCTION` and `CURLOPT_SEEKDATA` options. ++ fn seek(&mut self, whence: SeekFrom) -> SeekResult { ++ drop(whence); ++ SeekResult::CantSeek ++ } ++ ++ /// Specify a debug callback ++ /// ++ /// `debug_function` replaces the standard debug function used when ++ /// `verbose` is in effect. This callback receives debug information, ++ /// as specified in the type argument. ++ /// ++ /// By default this option is not set and corresponds to the ++ /// `CURLOPT_DEBUGFUNCTION` and `CURLOPT_DEBUGDATA` options. ++ fn debug(&mut self, kind: InfoType, data: &[u8]) { ++ debug(kind, data) ++ } ++ ++ /// Callback that receives header data ++ /// ++ /// This function gets called by libcurl as soon as it has received header ++ /// data. The header callback will be called once for each header and only ++ /// complete header lines are passed on to the callback. Parsing headers is ++ /// very easy using this. 
If this callback returns `false` it'll signal an ++ /// error to the library. This will cause the transfer to get aborted and ++ /// the libcurl function in progress will return `is_write_error`. ++ /// ++ /// A complete HTTP header that is passed to this function can be up to ++ /// CURL_MAX_HTTP_HEADER (100K) bytes. ++ /// ++ /// It's important to note that the callback will be invoked for the headers ++ /// of all responses received after initiating a request and not just the ++ /// final response. This includes all responses which occur during ++ /// authentication negotiation. If you need to operate on only the headers ++ /// from the final response, you will need to collect headers in the ++ /// callback yourself and use HTTP status lines, for example, to delimit ++ /// response boundaries. ++ /// ++ /// When a server sends a chunked encoded transfer, it may contain a ++ /// trailer. That trailer is identical to a HTTP header and if such a ++ /// trailer is received it is passed to the application using this callback ++ /// as well. There are several ways to detect it being a trailer and not an ++ /// ordinary header: 1) it comes after the response-body. 2) it comes after ++ /// the final header line (CR LF) 3) a Trailer: header among the regular ++ /// response-headers mention what header(s) to expect in the trailer. ++ /// ++ /// For non-HTTP protocols like FTP, POP3, IMAP and SMTP this function will ++ /// get called with the server responses to the commands that libcurl sends. ++ /// ++ /// By default this option is not set and corresponds to the ++ /// `CURLOPT_HEADERFUNCTION` and `CURLOPT_HEADERDATA` options. ++ fn header(&mut self, data: &[u8]) -> bool { ++ drop(data); ++ true ++ } ++ ++ /// Callback to progress meter function ++ /// ++ /// This function gets called by libcurl instead of its internal equivalent ++ /// with a frequent interval. 
While data is being transferred it will be ++ /// called very frequently, and during slow periods like when nothing is ++ /// being transferred it can slow down to about one call per second. ++ /// ++ /// The callback gets told how much data libcurl will transfer and has ++ /// transferred, in number of bytes. The first argument is the total number ++ /// of bytes libcurl expects to download in this transfer. The second ++ /// argument is the number of bytes downloaded so far. The third argument is ++ /// the total number of bytes libcurl expects to upload in this transfer. ++ /// The fourth argument is the number of bytes uploaded so far. ++ /// ++ /// Unknown/unused argument values passed to the callback will be set to ++ /// zero (like if you only download data, the upload size will remain 0). ++ /// Many times the callback will be called one or more times first, before ++ /// it knows the data sizes so a program must be made to handle that. ++ /// ++ /// Returning `false` from this callback will cause libcurl to abort the ++ /// transfer and return `is_aborted_by_callback`. ++ /// ++ /// If you transfer data with the multi interface, this function will not be ++ /// called during periods of idleness unless you call the appropriate ++ /// libcurl function that performs transfers. ++ /// ++ /// `progress` must be set to `true` to make this function actually get ++ /// called. ++ /// ++ /// By default this function calls an internal method and corresponds to ++ /// `CURLOPT_PROGRESSFUNCTION` and `CURLOPT_PROGRESSDATA`. 
++ fn progress(&mut self, ++ dltotal: f64, ++ dlnow: f64, ++ ultotal: f64, ++ ulnow: f64) -> bool { ++ drop((dltotal, dlnow, ultotal, ulnow)); ++ true ++ } ++ ++ /// Callback to SSL context ++ /// ++ /// This callback function gets called by libcurl just before the ++ /// initialization of an SSL connection after having processed all ++ /// other SSL related options to give a last chance to an ++ /// application to modify the behaviour of the SSL ++ /// initialization. The `ssl_ctx` parameter is actually a pointer ++ /// to the SSL library's SSL_CTX. If an error is returned from the ++ /// callback no attempt to establish a connection is made and the ++ /// perform operation will return the callback's error code. ++ /// ++ /// This function will get called on all new connections made to a ++ /// server, during the SSL negotiation. The SSL_CTX pointer will ++ /// be a new one every time. ++ /// ++ /// To use this properly, a non-trivial amount of knowledge of ++ /// your SSL library is necessary. For example, you can use this ++ /// function to call library-specific callbacks to add additional ++ /// validation code for certificates, and even to change the ++ /// actual URI of a HTTPS request. ++ /// ++ /// By default this function calls an internal method and ++ /// corresponds to `CURLOPT_SSL_CTX_FUNCTION` and ++ /// `CURLOPT_SSL_CTX_DATA`. ++ /// ++ /// Note that this callback is not guaranteed to be called, not all versions ++ /// of libcurl support calling this callback. ++ fn ssl_ctx(&mut self, cx: *mut c_void) -> Result<(), Error> { ++ drop(cx); ++ Ok(()) ++ } ++ ++ /// Callback to open sockets for libcurl. ++ /// ++ /// This callback function gets called by libcurl instead of the socket(2) ++ /// call. The callback function should return the newly created socket ++ /// or `None` in case no connection could be established or another ++ /// error was detected. 
Any additional `setsockopt(2)` calls can of course ++ /// be done on the socket at the user's discretion. A `None` return ++ /// value from the callback function will signal an unrecoverable error to ++ /// libcurl and it will return `is_couldnt_connect` from the function that ++ /// triggered this callback. ++ /// ++ /// By default this function opens a standard socket and ++ /// corresponds to `CURLOPT_OPENSOCKETFUNCTION `. ++ fn open_socket(&mut self, ++ family: c_int, ++ socktype: c_int, ++ protocol: c_int) -> Option { ++ // Note that we override this to calling a function in `socket2` to ++ // ensure that we open all sockets with CLOEXEC. Otherwise if we rely on ++ // libcurl to open sockets it won't use CLOEXEC. ++ return Socket::new(family.into(), socktype.into(), Some(protocol.into())) ++ .ok() ++ .map(cvt); ++ ++ #[cfg(unix)] ++ fn cvt(socket: Socket) -> curl_sys::curl_socket_t { ++ use std::os::unix::prelude::*; ++ socket.into_raw_fd() ++ } ++ ++ #[cfg(windows)] ++ fn cvt(socket: Socket) -> curl_sys::curl_socket_t { ++ use std::os::windows::prelude::*; ++ socket.into_raw_socket() ++ } ++ } ++} ++ ++pub fn debug(kind: InfoType, data: &[u8]) { ++ let out = io::stderr(); ++ let prefix = match kind { ++ InfoType::Text => "*", ++ InfoType::HeaderIn => "<", ++ InfoType::HeaderOut => ">", ++ InfoType::DataIn | ++ InfoType::SslDataIn => "{", ++ InfoType::DataOut | ++ InfoType::SslDataOut => "}", ++ InfoType::__Nonexhaustive => " ", ++ }; ++ let mut out = out.lock(); ++ drop(write!(out, "{} ", prefix)); ++ drop(out.write_all(data)); ++} ++ ++/// Raw bindings to a libcurl "easy session". ++/// ++/// This type corresponds to the `CURL` type in libcurl, and is probably what ++/// you want for just sending off a simple HTTP request and fetching a response. ++/// Each easy handle can be thought of as a large builder before calling the ++/// final `perform` function. 
++/// ++/// There are many many configuration options for each `Easy2` handle, and they ++/// should all have their own documentation indicating what it affects and how ++/// it interacts with other options. Some implementations of libcurl can use ++/// this handle to interact with many different protocols, although by default ++/// this crate only guarantees the HTTP/HTTPS protocols working. ++/// ++/// Note that almost all methods on this structure which configure various ++/// properties return a `Result`. This is largely used to detect whether the ++/// underlying implementation of libcurl actually implements the option being ++/// requested. If you're linked to a version of libcurl which doesn't support ++/// the option, then an error will be returned. Some options also perform some ++/// validation when they're set, and the error is returned through this vector. ++/// ++/// Note that historically this library contained an `Easy` handle so this one's ++/// called `Easy2`. The major difference between the `Easy` type is that an ++/// `Easy2` structure uses a trait instead of closures for all of the callbacks ++/// that curl can invoke. The `Easy` type is actually built on top of this ++/// `Easy` type, and this `Easy2` type can be more flexible in some situations ++/// due to the generic parameter. ++/// ++/// There's not necessarily a right answer for which type is correct to use, but ++/// as a general rule of thumb `Easy` is typically a reasonable choice for ++/// synchronous I/O and `Easy2` is a good choice for asynchronous I/O. 
++/// ++/// # Examples ++/// ++/// ``` ++/// use curl::easy::{Easy2, Handler, WriteError}; ++/// ++/// struct Collector(Vec); ++/// ++/// impl Handler for Collector { ++/// fn write(&mut self, data: &[u8]) -> Result { ++/// self.0.extend_from_slice(data); ++/// Ok(data.len()) ++/// } ++/// } ++/// ++/// let mut easy = Easy2::new(Collector(Vec::new())); ++/// easy.get(true).unwrap(); ++/// easy.url("https://www.rust-lang.org/").unwrap(); ++/// easy.perform().unwrap(); ++/// ++/// assert_eq!(easy.response_code().unwrap(), 200); ++/// let contents = easy.get_ref(); ++/// println!("{}", String::from_utf8_lossy(&contents.0)); ++/// ``` ++pub struct Easy2 { ++ inner: Box>, ++} ++ ++struct Inner { ++ handle: *mut curl_sys::CURL, ++ header_list: Option, ++ form: Option

, ++ error_buf: RefCell>, ++ handler: H, ++} ++ ++unsafe impl Send for Inner {} ++ ++/// Possible proxy types that libcurl currently understands. ++#[allow(missing_docs)] ++#[derive(Debug)] ++pub enum ProxyType { ++ Http = curl_sys::CURLPROXY_HTTP as isize, ++ Http1 = curl_sys::CURLPROXY_HTTP_1_0 as isize, ++ Socks4 = curl_sys::CURLPROXY_SOCKS4 as isize, ++ Socks5 = curl_sys::CURLPROXY_SOCKS5 as isize, ++ Socks4a = curl_sys::CURLPROXY_SOCKS4A as isize, ++ Socks5Hostname = curl_sys::CURLPROXY_SOCKS5_HOSTNAME as isize, ++ ++ /// Hidden variant to indicate that this enum should not be matched on, it ++ /// may grow over time. ++ #[doc(hidden)] ++ __Nonexhaustive, ++} ++ ++/// Possible conditions for the `time_condition` method. ++#[allow(missing_docs)] ++#[derive(Debug)] ++pub enum TimeCondition { ++ None = curl_sys::CURL_TIMECOND_NONE as isize, ++ IfModifiedSince = curl_sys::CURL_TIMECOND_IFMODSINCE as isize, ++ IfUnmodifiedSince = curl_sys::CURL_TIMECOND_IFUNMODSINCE as isize, ++ LastModified = curl_sys::CURL_TIMECOND_LASTMOD as isize, ++ ++ /// Hidden variant to indicate that this enum should not be matched on, it ++ /// may grow over time. ++ #[doc(hidden)] ++ __Nonexhaustive, ++} ++ ++/// Possible values to pass to the `ip_resolve` method. ++#[allow(missing_docs)] ++#[derive(Debug)] ++pub enum IpResolve { ++ V4 = curl_sys::CURL_IPRESOLVE_V4 as isize, ++ V6 = curl_sys::CURL_IPRESOLVE_V6 as isize, ++ Any = curl_sys::CURL_IPRESOLVE_WHATEVER as isize, ++ ++ /// Hidden variant to indicate that this enum should not be matched on, it ++ /// may grow over time. ++ #[doc(hidden)] ++ __Nonexhaustive = 500, ++} ++ ++/// Possible values to pass to the `http_version` method. ++#[derive(Debug)] ++pub enum HttpVersion { ++ /// We don't care what http version to use, and we'd like the library to ++ /// choose the best possible for us. 
++ Any = curl_sys::CURL_HTTP_VERSION_NONE as isize, ++ ++ /// Please use HTTP 1.0 in the request ++ V10 = curl_sys::CURL_HTTP_VERSION_1_0 as isize, ++ ++ /// Please use HTTP 1.1 in the request ++ V11 = curl_sys::CURL_HTTP_VERSION_1_1 as isize, ++ ++ /// Please use HTTP 2 in the request ++ /// (Added in CURL 7.33.0) ++ V2 = curl_sys::CURL_HTTP_VERSION_2_0 as isize, ++ ++ /// Use version 2 for HTTPS, version 1.1 for HTTP ++ /// (Added in CURL 7.47.0) ++ V2TLS = curl_sys::CURL_HTTP_VERSION_2TLS as isize, ++ ++ /// Please use HTTP 2 without HTTP/1.1 Upgrade ++ /// (Added in CURL 7.49.0) ++ V2PriorKnowledge = curl_sys::CURL_HTTP_VERSION_2_PRIOR_KNOWLEDGE as isize, ++ ++ /// Hidden variant to indicate that this enum should not be matched on, it ++ /// may grow over time. ++ #[doc(hidden)] ++ __Nonexhaustive = 500, ++} ++ ++/// Possible values to pass to the `ip_resolve` method. ++#[allow(missing_docs)] ++#[derive(Debug)] ++pub enum SslVersion { ++ Default = curl_sys::CURL_SSLVERSION_DEFAULT as isize, ++ Tlsv1 = curl_sys::CURL_SSLVERSION_TLSv1 as isize, ++ Sslv2 = curl_sys::CURL_SSLVERSION_SSLv2 as isize, ++ Sslv3 = curl_sys::CURL_SSLVERSION_SSLv3 as isize, ++ // Tlsv10 = curl_sys::CURL_SSLVERSION_TLSv1_0 as isize, ++ // Tlsv11 = curl_sys::CURL_SSLVERSION_TLSv1_1 as isize, ++ // Tlsv12 = curl_sys::CURL_SSLVERSION_TLSv1_2 as isize, ++ ++ /// Hidden variant to indicate that this enum should not be matched on, it ++ /// may grow over time. ++ #[doc(hidden)] ++ __Nonexhaustive = 500, ++} ++ ++/// Possible return values from the `seek_function` callback. ++#[derive(Debug)] ++pub enum SeekResult { ++ /// Indicates that the seek operation was a success ++ Ok = curl_sys::CURL_SEEKFUNC_OK as isize, ++ ++ /// Indicates that the seek operation failed, and the entire request should ++ /// fail as a result. 
++ Fail = curl_sys::CURL_SEEKFUNC_FAIL as isize, ++ ++ /// Indicates that although the seek failed libcurl should attempt to keep ++ /// working if possible (for example "seek" through reading). ++ CantSeek = curl_sys::CURL_SEEKFUNC_CANTSEEK as isize, ++ ++ /// Hidden variant to indicate that this enum should not be matched on, it ++ /// may grow over time. ++ #[doc(hidden)] ++ __Nonexhaustive = 500, ++} ++ ++/// Possible data chunks that can be witnessed as part of the `debug_function` ++/// callback. ++#[derive(Debug)] ++pub enum InfoType { ++ /// The data is informational text. ++ Text, ++ ++ /// The data is header (or header-like) data received from the peer. ++ HeaderIn, ++ ++ /// The data is header (or header-like) data sent to the peer. ++ HeaderOut, ++ ++ /// The data is protocol data received from the peer. ++ DataIn, ++ ++ /// The data is protocol data sent to the peer. ++ DataOut, ++ ++ /// The data is SSL/TLS (binary) data received from the peer. ++ SslDataIn, ++ ++ /// The data is SSL/TLS (binary) data sent to the peer. ++ SslDataOut, ++ ++ /// Hidden variant to indicate that this enum should not be matched on, it ++ /// may grow over time. ++ #[doc(hidden)] ++ __Nonexhaustive, ++} ++ ++/// Possible error codes that can be returned from the `read_function` callback. ++#[derive(Debug)] ++pub enum ReadError { ++ /// Indicates that the connection should be aborted immediately ++ Abort, ++ ++ /// Indicates that reading should be paused until `unpause` is called. ++ Pause, ++ ++ /// Hidden variant to indicate that this enum should not be matched on, it ++ /// may grow over time. ++ #[doc(hidden)] ++ __Nonexhaustive, ++} ++ ++/// Possible error codes that can be returned from the `write_function` callback. ++#[derive(Debug)] ++pub enum WriteError { ++ /// Indicates that reading should be paused until `unpause` is called. ++ Pause, ++ ++ /// Hidden variant to indicate that this enum should not be matched on, it ++ /// may grow over time. 
++ #[doc(hidden)] ++ __Nonexhaustive, ++} ++ ++/// Options for `.netrc` parsing. ++#[derive(Debug)] ++pub enum NetRc { ++ /// Ignoring `.netrc` file and use information from url ++ /// ++ /// This option is default ++ Ignored = curl_sys::CURL_NETRC_IGNORED as isize, ++ ++ /// The use of your `~/.netrc` file is optional, and information in the URL is to be ++ /// preferred. The file will be scanned for the host and user name (to find the password only) ++ /// or for the host only, to find the first user name and password after that machine, which ++ /// ever information is not specified in the URL. ++ Optional = curl_sys::CURL_NETRC_OPTIONAL as isize, ++ ++ /// This value tells the library that use of the file is required, to ignore the information in ++ /// the URL, and to search the file for the host only. ++ Required = curl_sys::CURL_NETRC_REQUIRED as isize, ++} ++ ++/// Structure which stores possible authentication methods to get passed to ++/// `http_auth` and `proxy_auth`. ++#[derive(Clone)] ++pub struct Auth { ++ bits: c_long, ++} ++ ++/// Structure which stores possible ssl options to pass to `ssl_options`. ++#[derive(Clone)] ++pub struct SslOpt { ++ bits: c_long, ++} ++ ++impl Easy2 { ++ /// Creates a new "easy" handle which is the core of almost all operations ++ /// in libcurl. ++ /// ++ /// To use a handle, applications typically configure a number of options ++ /// followed by a call to `perform`. Options are preserved across calls to ++ /// `perform` and need to be reset manually (or via the `reset` method) if ++ /// this is not desired. 
++ pub fn new(handler: H) -> Easy2 { ++ ::init(); ++ unsafe { ++ let handle = curl_sys::curl_easy_init(); ++ assert!(!handle.is_null()); ++ let mut ret = Easy2 { ++ inner: Box::new(Inner { ++ handle: handle, ++ header_list: None, ++ form: None, ++ error_buf: RefCell::new(vec![0; curl_sys::CURL_ERROR_SIZE]), ++ handler: handler, ++ }), ++ }; ++ ret.default_configure(); ++ return ret ++ } ++ } ++ ++ /// Re-initializes this handle to the default values. ++ /// ++ /// This puts the handle to the same state as it was in when it was just ++ /// created. This does, however, keep live connections, the session id ++ /// cache, the dns cache, and cookies. ++ pub fn reset(&mut self) { ++ unsafe { ++ curl_sys::curl_easy_reset(self.inner.handle); ++ } ++ self.default_configure(); ++ } ++ ++ fn default_configure(&mut self) { ++ self.setopt_ptr(curl_sys::CURLOPT_ERRORBUFFER, ++ self.inner.error_buf.borrow().as_ptr() as *const _) ++ .expect("failed to set error buffer"); ++ let _ = self.signal(false); ++ self.ssl_configure(); ++ ++ let ptr = &*self.inner as *const _ as *const _; ++ ++ let cb: extern fn(*mut c_char, size_t, size_t, *mut c_void) -> size_t ++ = header_cb::; ++ self.setopt_ptr(curl_sys::CURLOPT_HEADERFUNCTION, cb as *const _) ++ .expect("failed to set header callback"); ++ self.setopt_ptr(curl_sys::CURLOPT_HEADERDATA, ptr) ++ .expect("failed to set header callback"); ++ ++ let cb: curl_sys::curl_write_callback = write_cb::; ++ self.setopt_ptr(curl_sys::CURLOPT_WRITEFUNCTION, cb as *const _) ++ .expect("failed to set write callback"); ++ self.setopt_ptr(curl_sys::CURLOPT_WRITEDATA, ptr) ++ .expect("failed to set write callback"); ++ ++ let cb: curl_sys::curl_read_callback = read_cb::; ++ self.setopt_ptr(curl_sys::CURLOPT_READFUNCTION, cb as *const _) ++ .expect("failed to set read callback"); ++ self.setopt_ptr(curl_sys::CURLOPT_READDATA, ptr) ++ .expect("failed to set read callback"); ++ ++ let cb: curl_sys::curl_seek_callback = seek_cb::; ++ 
self.setopt_ptr(curl_sys::CURLOPT_SEEKFUNCTION, cb as *const _) ++ .expect("failed to set seek callback"); ++ self.setopt_ptr(curl_sys::CURLOPT_SEEKDATA, ptr) ++ .expect("failed to set seek callback"); ++ ++ let cb: curl_sys::curl_progress_callback = progress_cb::; ++ self.setopt_ptr(curl_sys::CURLOPT_PROGRESSFUNCTION, cb as *const _) ++ .expect("failed to set progress callback"); ++ self.setopt_ptr(curl_sys::CURLOPT_PROGRESSDATA, ptr) ++ .expect("failed to set progress callback"); ++ ++ let cb: curl_sys::curl_debug_callback = debug_cb::; ++ self.setopt_ptr(curl_sys::CURLOPT_DEBUGFUNCTION, cb as *const _) ++ .expect("failed to set debug callback"); ++ self.setopt_ptr(curl_sys::CURLOPT_DEBUGDATA, ptr) ++ .expect("failed to set debug callback"); ++ ++ let cb: curl_sys::curl_ssl_ctx_callback = ssl_ctx_cb::; ++ drop(self.setopt_ptr(curl_sys::CURLOPT_SSL_CTX_FUNCTION, cb as *const _)); ++ drop(self.setopt_ptr(curl_sys::CURLOPT_SSL_CTX_DATA, ptr)); ++ ++ let cb: curl_sys::curl_opensocket_callback = opensocket_cb::; ++ self.setopt_ptr(curl_sys::CURLOPT_OPENSOCKETFUNCTION , cb as *const _) ++ .expect("failed to set open socket callback"); ++ self.setopt_ptr(curl_sys::CURLOPT_OPENSOCKETDATA, ptr) ++ .expect("failed to set open socket callback"); ++ } ++ ++ #[cfg(all(unix, not(target_os = "macos")))] ++ fn ssl_configure(&mut self) { ++ let probe = ::openssl_probe::probe(); ++ if let Some(ref path) = probe.cert_file { ++ let _ = self.cainfo(path); ++ } ++ if let Some(ref path) = probe.cert_dir { ++ let _ = self.capath(path); ++ } ++ } ++ ++ #[cfg(not(all(unix, not(target_os = "macos"))))] ++ fn ssl_configure(&mut self) {} ++} ++ ++impl Easy2 { ++ // ========================================================================= ++ // Behavior options ++ ++ /// Configures this handle to have verbose output to help debug protocol ++ /// information. ++ /// ++ /// By default output goes to stderr, but the `stderr` function on this type ++ /// can configure that. 
You can also use the `debug_function` method to get ++ /// all protocol data sent and received. ++ /// ++ /// By default, this option is `false`. ++ pub fn verbose(&mut self, verbose: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_VERBOSE, verbose as c_long) ++ } ++ ++ /// Indicates whether header information is streamed to the output body of ++ /// this request. ++ /// ++ /// This option is only relevant for protocols which have header metadata ++ /// (like http or ftp). It's not generally possible to extract headers ++ /// from the body if using this method, that use case should be intended for ++ /// the `header_function` method. ++ /// ++ /// To set HTTP headers, use the `http_header` method. ++ /// ++ /// By default, this option is `false` and corresponds to ++ /// `CURLOPT_HEADER`. ++ pub fn show_header(&mut self, show: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_HEADER, show as c_long) ++ } ++ ++ /// Indicates whether a progress meter will be shown for requests done with ++ /// this handle. ++ /// ++ /// This will also prevent the `progress_function` from being called. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_NOPROGRESS`. ++ pub fn progress(&mut self, progress: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_NOPROGRESS, ++ (!progress) as c_long) ++ } ++ ++ /// Inform libcurl whether or not it should install signal handlers or ++ /// attempt to use signals to perform library functions. ++ /// ++ /// If this option is disabled then timeouts during name resolution will not ++ /// work unless libcurl is built against c-ares. Note that enabling this ++ /// option, however, may not cause libcurl to work with multiple threads. ++ /// ++ /// By default this option is `false` and corresponds to `CURLOPT_NOSIGNAL`. ++ /// Note that this default is **different than libcurl** as it is intended ++ /// that this library is threadsafe by default. 
See the [libcurl docs] for ++ /// some more information. ++ /// ++ /// [libcurl docs]: https://curl.haxx.se/libcurl/c/threadsafe.html ++ pub fn signal(&mut self, signal: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_NOSIGNAL, ++ (!signal) as c_long) ++ } ++ ++ /// Indicates whether multiple files will be transferred based on the file ++ /// name pattern. ++ /// ++ /// The last part of a filename uses fnmatch-like pattern matching. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_WILDCARDMATCH`. ++ pub fn wildcard_match(&mut self, m: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_WILDCARDMATCH, m as c_long) ++ } ++ ++ /// Provides the unix domain socket which this handle will work with. ++ /// ++ /// The string provided must be unix domain socket -encoded with the format: ++ /// ++ /// ```text ++ /// /path/file.sock ++ /// ``` ++ pub fn unix_socket(&mut self, unix_domain_socket: &str) -> Result<(), Error> { ++ let socket = try!(CString::new(unix_domain_socket)); ++ self.setopt_str(curl_sys::CURLOPT_UNIX_SOCKET_PATH, &socket) ++ } ++ ++ ++ // ========================================================================= ++ // Internal accessors ++ ++ /// Acquires a reference to the underlying handler for events. ++ pub fn get_ref(&self) -> &H { ++ &self.inner.handler ++ } ++ ++ /// Acquires a reference to the underlying handler for events. ++ pub fn get_mut(&mut self) -> &mut H { ++ &mut self.inner.handler ++ } ++ ++ // ========================================================================= ++ // Error options ++ ++ // TODO: error buffer and stderr ++ ++ /// Indicates whether this library will fail on HTTP response codes >= 400. ++ /// ++ /// This method is not fail-safe especially when authentication is involved. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_FAILONERROR`. 
++ pub fn fail_on_error(&mut self, fail: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_FAILONERROR, fail as c_long) ++ } ++ ++ // ========================================================================= ++ // Network options ++ ++ /// Provides the URL which this handle will work with. ++ /// ++ /// The string provided must be URL-encoded with the format: ++ /// ++ /// ```text ++ /// scheme://host:port/path ++ /// ``` ++ /// ++ /// The syntax is not validated as part of this function and that is ++ /// deferred until later. ++ /// ++ /// By default this option is not set and `perform` will not work until it ++ /// is set. This option corresponds to `CURLOPT_URL`. ++ pub fn url(&mut self, url: &str) -> Result<(), Error> { ++ let url = try!(CString::new(url)); ++ self.setopt_str(curl_sys::CURLOPT_URL, &url) ++ } ++ ++ /// Configures the port number to connect to, instead of the one specified ++ /// in the URL or the default of the protocol. ++ pub fn port(&mut self, port: u16) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_PORT, port as c_long) ++ } ++ ++ // /// Indicates whether sequences of `/../` and `/./` will be squashed or not. ++ // /// ++ // /// By default this option is `false` and corresponds to ++ // /// `CURLOPT_PATH_AS_IS`. ++ // pub fn path_as_is(&mut self, as_is: bool) -> Result<(), Error> { ++ // } ++ ++ /// Provide the URL of a proxy to use. ++ /// ++ /// By default this option is not set and corresponds to `CURLOPT_PROXY`. ++ pub fn proxy(&mut self, url: &str) -> Result<(), Error> { ++ let url = try!(CString::new(url)); ++ self.setopt_str(curl_sys::CURLOPT_PROXY, &url) ++ } ++ ++ /// Provide port number the proxy is listening on. ++ /// ++ /// By default this option is not set (the default port for the proxy ++ /// protocol is used) and corresponds to `CURLOPT_PROXYPORT`. 
++ pub fn proxy_port(&mut self, port: u16) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_PROXYPORT, port as c_long) ++ } ++ ++ /// Indicates the type of proxy being used. ++ /// ++ /// By default this option is `ProxyType::Http` and corresponds to ++ /// `CURLOPT_PROXYTYPE`. ++ pub fn proxy_type(&mut self, kind: ProxyType) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_PROXYTYPE, kind as c_long) ++ } ++ ++ /// Provide a list of hosts that should not be proxied to. ++ /// ++ /// This string is a comma-separated list of hosts which should not use the ++ /// proxy specified for connections. A single `*` character is also accepted ++ /// as a wildcard for all hosts. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_NOPROXY`. ++ pub fn noproxy(&mut self, skip: &str) -> Result<(), Error> { ++ let skip = try!(CString::new(skip)); ++ self.setopt_str(curl_sys::CURLOPT_PROXYTYPE, &skip) ++ } ++ ++ /// Inform curl whether it should tunnel all operations through the proxy. ++ /// ++ /// This essentially means that a `CONNECT` is sent to the proxy for all ++ /// outbound requests. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_HTTPPROXYTUNNEL`. ++ pub fn http_proxy_tunnel(&mut self, tunnel: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_HTTPPROXYTUNNEL, ++ tunnel as c_long) ++ } ++ ++ /// Tell curl which interface to bind to for an outgoing network interface. ++ /// ++ /// The interface name, IP address, or host name can be specified here. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_INTERFACE`. ++ pub fn interface(&mut self, interface: &str) -> Result<(), Error> { ++ let s = try!(CString::new(interface)); ++ self.setopt_str(curl_sys::CURLOPT_INTERFACE, &s) ++ } ++ ++ /// Indicate which port should be bound to locally for this connection. 
++ /// ++ /// By default this option is 0 (any port) and corresponds to ++ /// `CURLOPT_LOCALPORT`. ++ pub fn set_local_port(&mut self, port: u16) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_LOCALPORT, port as c_long) ++ } ++ ++ /// Indicates the number of attempts libcurl will perform to find a working ++ /// port number. ++ /// ++ /// By default this option is 1 and corresponds to ++ /// `CURLOPT_LOCALPORTRANGE`. ++ pub fn local_port_range(&mut self, range: u16) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_LOCALPORTRANGE, ++ range as c_long) ++ } ++ ++ /// Sets the timeout of how long name resolves will be kept in memory. ++ /// ++ /// This is distinct from DNS TTL options and is entirely speculative. ++ /// ++ /// By default this option is 60s and corresponds to ++ /// `CURLOPT_DNS_CACHE_TIMEOUT`. ++ pub fn dns_cache_timeout(&mut self, dur: Duration) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_DNS_CACHE_TIMEOUT, ++ dur.as_secs() as c_long) ++ } ++ ++ /// Specify the preferred receive buffer size, in bytes. ++ /// ++ /// This is treated as a request, not an order, and the main point of this ++ /// is that the write callback may get called more often with smaller ++ /// chunks. ++ /// ++ /// By default this option is the maximum write size and corresopnds to ++ /// `CURLOPT_BUFFERSIZE`. ++ pub fn buffer_size(&mut self, size: usize) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_BUFFERSIZE, size as c_long) ++ } ++ ++ // /// Enable or disable TCP Fast Open ++ // /// ++ // /// By default this options defaults to `false` and corresponds to ++ // /// `CURLOPT_TCP_FASTOPEN` ++ // pub fn fast_open(&mut self, enable: bool) -> Result<(), Error> { ++ // } ++ ++ /// Configures whether the TCP_NODELAY option is set, or Nagle's algorithm ++ /// is disabled. 
++ /// ++ /// The purpose of Nagle's algorithm is to minimize the number of small ++ /// packet's on the network, and disabling this may be less efficient in ++ /// some situations. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_TCP_NODELAY`. ++ pub fn tcp_nodelay(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_TCP_NODELAY, enable as c_long) ++ } ++ ++ /// Configures whether TCP keepalive probes will be sent. ++ /// ++ /// The delay and frequency of these probes is controlled by `tcp_keepidle` ++ /// and `tcp_keepintvl`. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_TCP_KEEPALIVE`. ++ pub fn tcp_keepalive(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_TCP_KEEPALIVE, enable as c_long) ++ } ++ ++ /// Configures the TCP keepalive idle time wait. ++ /// ++ /// This is the delay, after which the connection is idle, keepalive probes ++ /// will be sent. Not all operating systems support this. ++ /// ++ /// By default this corresponds to `CURLOPT_TCP_KEEPIDLE`. ++ pub fn tcp_keepidle(&mut self, amt: Duration) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_TCP_KEEPIDLE, ++ amt.as_secs() as c_long) ++ } ++ ++ /// Configures the delay between keepalive probes. ++ /// ++ /// By default this corresponds to `CURLOPT_TCP_KEEPINTVL`. ++ pub fn tcp_keepintvl(&mut self, amt: Duration) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_TCP_KEEPINTVL, ++ amt.as_secs() as c_long) ++ } ++ ++ /// Configures the scope for local IPv6 addresses. ++ /// ++ /// Sets the scope_id value to use when connecting to IPv6 or link-local ++ /// addresses. 
++ /// ++ /// By default this value is 0 and corresponds to `CURLOPT_ADDRESS_SCOPE` ++ pub fn address_scope(&mut self, scope: u32) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_ADDRESS_SCOPE, ++ scope as c_long) ++ } ++ ++ // ========================================================================= ++ // Names and passwords ++ ++ /// Configures the username to pass as authentication for this connection. ++ /// ++ /// By default this value is not set and corresponds to `CURLOPT_USERNAME`. ++ pub fn username(&mut self, user: &str) -> Result<(), Error> { ++ let user = try!(CString::new(user)); ++ self.setopt_str(curl_sys::CURLOPT_USERNAME, &user) ++ } ++ ++ /// Configures the password to pass as authentication for this connection. ++ /// ++ /// By default this value is not set and corresponds to `CURLOPT_PASSWORD`. ++ pub fn password(&mut self, pass: &str) -> Result<(), Error> { ++ let pass = try!(CString::new(pass)); ++ self.setopt_str(curl_sys::CURLOPT_PASSWORD, &pass) ++ } ++ ++ /// Set HTTP server authentication methods to try ++ /// ++ /// If more than one method is set, libcurl will first query the site to see ++ /// which authentication methods it supports and then pick the best one you ++ /// allow it to use. For some methods, this will induce an extra network ++ /// round-trip. Set the actual name and password with the `password` and ++ /// `username` methods. ++ /// ++ /// For authentication with a proxy, see `proxy_auth`. ++ /// ++ /// By default this value is basic and corresponds to `CURLOPT_HTTPAUTH`. ++ pub fn http_auth(&mut self, auth: &Auth) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_HTTPAUTH, auth.bits) ++ } ++ ++ /// Configures the proxy username to pass as authentication for this ++ /// connection. ++ /// ++ /// By default this value is not set and corresponds to ++ /// `CURLOPT_PROXYUSERNAME`. 
++ pub fn proxy_username(&mut self, user: &str) -> Result<(), Error> { ++ let user = try!(CString::new(user)); ++ self.setopt_str(curl_sys::CURLOPT_PROXYUSERNAME, &user) ++ } ++ ++ /// Configures the proxy password to pass as authentication for this ++ /// connection. ++ /// ++ /// By default this value is not set and corresponds to ++ /// `CURLOPT_PROXYPASSWORD`. ++ pub fn proxy_password(&mut self, pass: &str) -> Result<(), Error> { ++ let pass = try!(CString::new(pass)); ++ self.setopt_str(curl_sys::CURLOPT_PROXYPASSWORD, &pass) ++ } ++ ++ /// Set HTTP proxy authentication methods to try ++ /// ++ /// If more than one method is set, libcurl will first query the site to see ++ /// which authentication methods it supports and then pick the best one you ++ /// allow it to use. For some methods, this will induce an extra network ++ /// round-trip. Set the actual name and password with the `proxy_password` ++ /// and `proxy_username` methods. ++ /// ++ /// By default this value is basic and corresponds to `CURLOPT_PROXYAUTH`. ++ pub fn proxy_auth(&mut self, auth: &Auth) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_PROXYAUTH, auth.bits) ++ } ++ ++ /// Enable .netrc parsing ++ /// ++ /// By default the .netrc file is ignored and corresponds to `CURL_NETRC_IGNORED`. ++ pub fn netrc(&mut self, netrc: NetRc) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_NETRC, netrc as c_long) ++ } ++ ++ // ========================================================================= ++ // HTTP Options ++ ++ /// Indicates whether the referer header is automatically updated ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_AUTOREFERER`. ++ pub fn autoreferer(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_AUTOREFERER, enable as c_long) ++ } ++ ++ /// Enables automatic decompression of HTTP downloads. ++ /// ++ /// Sets the contents of the Accept-Encoding header sent in an HTTP request. 
++ /// This enables decoding of a response with Content-Encoding. ++ /// ++ /// Currently supported encoding are `identity`, `zlib`, and `gzip`. A ++ /// zero-length string passed in will send all accepted encodings. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_ACCEPT_ENCODING`. ++ pub fn accept_encoding(&mut self, encoding: &str) -> Result<(), Error> { ++ let encoding = try!(CString::new(encoding)); ++ self.setopt_str(curl_sys::CURLOPT_ACCEPT_ENCODING, &encoding) ++ } ++ ++ /// Request the HTTP Transfer Encoding. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_TRANSFER_ENCODING`. ++ pub fn transfer_encoding(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_TRANSFER_ENCODING, enable as c_long) ++ } ++ ++ /// Follow HTTP 3xx redirects. ++ /// ++ /// Indicates whether any `Location` headers in the response should get ++ /// followed. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_FOLLOWLOCATION`. ++ pub fn follow_location(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_FOLLOWLOCATION, enable as c_long) ++ } ++ ++ /// Send credentials to hosts other than the first as well. ++ /// ++ /// Sends username/password credentials even when the host changes as part ++ /// of a redirect. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_UNRESTRICTED_AUTH`. ++ pub fn unrestricted_auth(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_UNRESTRICTED_AUTH, enable as c_long) ++ } ++ ++ /// Set the maximum number of redirects allowed. ++ /// ++ /// A value of 0 will refuse any redirect. ++ /// ++ /// By default this option is `-1` (unlimited) and corresponds to ++ /// `CURLOPT_MAXREDIRS`. 
++ pub fn max_redirections(&mut self, max: u32) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_MAXREDIRS, max as c_long) ++ } ++ ++ // TODO: post_redirections ++ ++ /// Make an HTTP PUT request. ++ /// ++ /// By default this option is `false` and corresponds to `CURLOPT_PUT`. ++ pub fn put(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_PUT, enable as c_long) ++ } ++ ++ /// Make an HTTP POST request. ++ /// ++ /// This will also make the library use the ++ /// `Content-Type: application/x-www-form-urlencoded` header. ++ /// ++ /// POST data can be specified through `post_fields` or by specifying a read ++ /// function. ++ /// ++ /// By default this option is `false` and corresponds to `CURLOPT_POST`. ++ pub fn post(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_POST, enable as c_long) ++ } ++ ++ /// Configures the data that will be uploaded as part of a POST. ++ /// ++ /// Note that the data is copied into this handle and if that's not desired ++ /// then the read callbacks can be used instead. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_COPYPOSTFIELDS`. ++ pub fn post_fields_copy(&mut self, data: &[u8]) -> Result<(), Error> { ++ // Set the length before the pointer so libcurl knows how much to read ++ try!(self.post_field_size(data.len() as u64)); ++ self.setopt_ptr(curl_sys::CURLOPT_COPYPOSTFIELDS, ++ data.as_ptr() as *const _) ++ } ++ ++ /// Configures the size of data that's going to be uploaded as part of a ++ /// POST operation. ++ /// ++ /// This is called automaticsally as part of `post_fields` and should only ++ /// be called if data is being provided in a read callback (and even then ++ /// it's optional). ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_POSTFIELDSIZE_LARGE`. 
++ pub fn post_field_size(&mut self, size: u64) -> Result<(), Error> { ++ // Clear anything previous to ensure we don't read past a buffer ++ try!(self.setopt_ptr(curl_sys::CURLOPT_POSTFIELDS, 0 as *const _)); ++ self.setopt_off_t(curl_sys::CURLOPT_POSTFIELDSIZE_LARGE, ++ size as curl_sys::curl_off_t) ++ } ++ ++ /// Tells libcurl you want a multipart/formdata HTTP POST to be made and you ++ /// instruct what data to pass on to the server in the `form` argument. ++ /// ++ /// By default this option is set to null and corresponds to ++ /// `CURLOPT_HTTPPOST`. ++ pub fn httppost(&mut self, form: Form) -> Result<(), Error> { ++ try!(self.setopt_ptr(curl_sys::CURLOPT_HTTPPOST, ++ form::raw(&form) as *const _)); ++ self.inner.form = Some(form); ++ Ok(()) ++ } ++ ++ /// Sets the HTTP referer header ++ /// ++ /// By default this option is not set and corresponds to `CURLOPT_REFERER`. ++ pub fn referer(&mut self, referer: &str) -> Result<(), Error> { ++ let referer = try!(CString::new(referer)); ++ self.setopt_str(curl_sys::CURLOPT_REFERER, &referer) ++ } ++ ++ /// Sets the HTTP user-agent header ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_USERAGENT`. ++ pub fn useragent(&mut self, useragent: &str) -> Result<(), Error> { ++ let useragent = try!(CString::new(useragent)); ++ self.setopt_str(curl_sys::CURLOPT_USERAGENT, &useragent) ++ } ++ ++ /// Add some headers to this HTTP request. ++ /// ++ /// If you add a header that is otherwise used internally, the value here ++ /// takes precedence. If a header is added with no content (like `Accept:`) ++ /// the internally the header will get disabled. To add a header with no ++ /// content, use the form `MyHeader;` (not the trailing semicolon). ++ /// ++ /// Headers must not be CRLF terminated. Many replaced headers have common ++ /// shortcuts which should be prefered. 
++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_HTTPHEADER` ++ /// ++ /// # Examples ++ /// ++ /// ``` ++ /// use curl::easy::{Easy, List}; ++ /// ++ /// let mut list = List::new(); ++ /// list.append("Foo: bar").unwrap(); ++ /// list.append("Bar: baz").unwrap(); ++ /// ++ /// let mut handle = Easy::new(); ++ /// handle.url("https://www.rust-lang.org/").unwrap(); ++ /// handle.http_headers(list).unwrap(); ++ /// handle.perform().unwrap(); ++ /// ``` ++ pub fn http_headers(&mut self, list: List) -> Result<(), Error> { ++ let ptr = list::raw(&list); ++ self.inner.header_list = Some(list); ++ self.setopt_ptr(curl_sys::CURLOPT_HTTPHEADER, ptr as *const _) ++ } ++ ++ // /// Add some headers to send to the HTTP proxy. ++ // /// ++ // /// This function is essentially the same as `http_headers`. ++ // /// ++ // /// By default this option is not set and corresponds to ++ // /// `CURLOPT_PROXYHEADER` ++ // pub fn proxy_headers(&mut self, list: &'a List) -> Result<(), Error> { ++ // self.setopt_ptr(curl_sys::CURLOPT_PROXYHEADER, list.raw as *const _) ++ // } ++ ++ /// Set the contents of the HTTP Cookie header. ++ /// ++ /// Pass a string of the form `name=contents` for one cookie value or ++ /// `name1=val1; name2=val2` for multiple values. ++ /// ++ /// Using this option multiple times will only make the latest string ++ /// override the previous ones. This option will not enable the cookie ++ /// engine, use `cookie_file` or `cookie_jar` to do that. ++ /// ++ /// By default this option is not set and corresponds to `CURLOPT_COOKIE`. ++ pub fn cookie(&mut self, cookie: &str) -> Result<(), Error> { ++ let cookie = try!(CString::new(cookie)); ++ self.setopt_str(curl_sys::CURLOPT_COOKIE, &cookie) ++ } ++ ++ /// Set the file name to read cookies from. ++ /// ++ /// The cookie data can be in either the old Netscape / Mozilla cookie data ++ /// format or just regular HTTP headers (Set-Cookie style) dumped to a file. 
++ /// ++ /// This also enables the cookie engine, making libcurl parse and send ++ /// cookies on subsequent requests with this handle. ++ /// ++ /// Given an empty or non-existing file or by passing the empty string ("") ++ /// to this option, you can enable the cookie engine without reading any ++ /// initial cookies. ++ /// ++ /// If you use this option multiple times, you just add more files to read. ++ /// Subsequent files will add more cookies. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_COOKIEFILE`. ++ pub fn cookie_file>(&mut self, file: P) -> Result<(), Error> { ++ self.setopt_path(curl_sys::CURLOPT_COOKIEFILE, file.as_ref()) ++ } ++ ++ /// Set the file name to store cookies to. ++ /// ++ /// This will make libcurl write all internally known cookies to the file ++ /// when this handle is dropped. If no cookies are known, no file will be ++ /// created. Specify "-" as filename to instead have the cookies written to ++ /// stdout. Using this option also enables cookies for this session, so if ++ /// you for example follow a location it will make matching cookies get sent ++ /// accordingly. ++ /// ++ /// Note that libcurl doesn't read any cookies from the cookie jar. If you ++ /// want to read cookies from a file, use `cookie_file`. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_COOKIEJAR`. ++ pub fn cookie_jar>(&mut self, file: P) -> Result<(), Error> { ++ self.setopt_path(curl_sys::CURLOPT_COOKIEJAR, file.as_ref()) ++ } ++ ++ /// Start a new cookie session ++ /// ++ /// Marks this as a new cookie "session". It will force libcurl to ignore ++ /// all cookies it is about to load that are "session cookies" from the ++ /// previous session. By default, libcurl always stores and loads all ++ /// cookies, independent if they are session cookies or not. Session cookies ++ /// are cookies without expiry date and they are meant to be alive and ++ /// existing for this "session" only. 
++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_COOKIESESSION`. ++ pub fn cookie_session(&mut self, session: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_COOKIESESSION, session as c_long) ++ } ++ ++ /// Add to or manipulate cookies held in memory. ++ /// ++ /// Such a cookie can be either a single line in Netscape / Mozilla format ++ /// or just regular HTTP-style header (Set-Cookie: ...) format. This will ++ /// also enable the cookie engine. This adds that single cookie to the ++ /// internal cookie store. ++ /// ++ /// Exercise caution if you are using this option and multiple transfers may ++ /// occur. If you use the Set-Cookie format and don't specify a domain then ++ /// the cookie is sent for any domain (even after redirects are followed) ++ /// and cannot be modified by a server-set cookie. If a server sets a cookie ++ /// of the same name (or maybe you've imported one) then both will be sent ++ /// on a future transfer to that server, likely not what you intended. ++ /// address these issues set a domain in Set-Cookie or use the Netscape ++ /// format. ++ /// ++ /// Additionally, there are commands available that perform actions if you ++ /// pass in these exact strings: ++ /// ++ /// * "ALL" - erases all cookies held in memory ++ /// * "SESS" - erases all session cookies held in memory ++ /// * "FLUSH" - write all known cookies to the specified cookie jar ++ /// * "RELOAD" - reread all cookies from the cookie file ++ /// ++ /// By default this options corresponds to `CURLOPT_COOKIELIST` ++ pub fn cookie_list(&mut self, cookie: &str) -> Result<(), Error> { ++ let cookie = try!(CString::new(cookie)); ++ self.setopt_str(curl_sys::CURLOPT_COOKIELIST, &cookie) ++ } ++ ++ /// Ask for a HTTP GET request. ++ /// ++ /// By default this option is `false` and corresponds to `CURLOPT_HTTPGET`. 
++ pub fn get(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_HTTPGET, enable as c_long) ++ } ++ ++ // /// Ask for a HTTP GET request. ++ // /// ++ // /// By default this option is `false` and corresponds to `CURLOPT_HTTPGET`. ++ // pub fn http_version(&mut self, vers: &str) -> Result<(), Error> { ++ // self.setopt_long(curl_sys::CURLOPT_HTTPGET, enable as c_long) ++ // } ++ ++ /// Ignore the content-length header. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_IGNORE_CONTENT_LENGTH`. ++ pub fn ignore_content_length(&mut self, ignore: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_IGNORE_CONTENT_LENGTH, ++ ignore as c_long) ++ } ++ ++ /// Enable or disable HTTP content decoding. ++ /// ++ /// By default this option is `true` and corresponds to ++ /// `CURLOPT_HTTP_CONTENT_DECODING`. ++ pub fn http_content_decoding(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_HTTP_CONTENT_DECODING, ++ enable as c_long) ++ } ++ ++ /// Enable or disable HTTP transfer decoding. ++ /// ++ /// By default this option is `true` and corresponds to ++ /// `CURLOPT_HTTP_TRANSFER_DECODING`. ++ pub fn http_transfer_decoding(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_HTTP_TRANSFER_DECODING, ++ enable as c_long) ++ } ++ ++ // /// Timeout for the Expect: 100-continue response ++ // /// ++ // /// By default this option is 1s and corresponds to ++ // /// `CURLOPT_EXPECT_100_TIMEOUT_MS`. ++ // pub fn expect_100_timeout(&mut self, enable: bool) -> Result<(), Error> { ++ // self.setopt_long(curl_sys::CURLOPT_HTTP_TRANSFER_DECODING, ++ // enable as c_long) ++ // } ++ ++ // /// Wait for pipelining/multiplexing. ++ // /// ++ // /// Tells libcurl to prefer to wait for a connection to confirm or deny that ++ // /// it can do pipelining or multiplexing before continuing. 
++ // /// ++ // /// When about to perform a new transfer that allows pipelining or ++ // /// multiplexing, libcurl will check for existing connections to re-use and ++ // /// pipeline on. If no such connection exists it will immediately continue ++ // /// and create a fresh new connection to use. ++ // /// ++ // /// By setting this option to `true` - having `pipeline` enabled for the ++ // /// multi handle this transfer is associated with - libcurl will instead ++ // /// wait for the connection to reveal if it is possible to ++ // /// pipeline/multiplex on before it continues. This enables libcurl to much ++ // /// better keep the number of connections to a minimum when using pipelining ++ // /// or multiplexing protocols. ++ // /// ++ // /// The effect thus becomes that with this option set, libcurl prefers to ++ // /// wait and re-use an existing connection for pipelining rather than the ++ // /// opposite: prefer to open a new connection rather than waiting. ++ // /// ++ // /// The waiting time is as long as it takes for the connection to get up and ++ // /// for libcurl to get the necessary response back that informs it about its ++ // /// protocol and support level. ++ // pub fn http_pipewait(&mut self, enable: bool) -> Result<(), Error> { ++ // } ++ ++ ++ // ========================================================================= ++ // Protocol Options ++ ++ /// Indicates the range that this request should retrieve. ++ /// ++ /// The string provided should be of the form `N-M` where either `N` or `M` ++ /// can be left out. For HTTP transfers multiple ranges separated by commas ++ /// are also accepted. ++ /// ++ /// By default this option is not set and corresponds to `CURLOPT_RANGE`. 
++ pub fn range(&mut self, range: &str) -> Result<(), Error> { ++ let range = try!(CString::new(range)); ++ self.setopt_str(curl_sys::CURLOPT_RANGE, &range) ++ } ++ ++ /// Set a point to resume transfer from ++ /// ++ /// Specify the offset in bytes you want the transfer to start from. ++ /// ++ /// By default this option is 0 and corresponds to ++ /// `CURLOPT_RESUME_FROM_LARGE`. ++ pub fn resume_from(&mut self, from: u64) -> Result<(), Error> { ++ self.setopt_off_t(curl_sys::CURLOPT_RESUME_FROM_LARGE, ++ from as curl_sys::curl_off_t) ++ } ++ ++ /// Set a custom request string ++ /// ++ /// Specifies that a custom request will be made (e.g. a custom HTTP ++ /// method). This does not change how libcurl performs internally, just ++ /// changes the string sent to the server. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_CUSTOMREQUEST`. ++ pub fn custom_request(&mut self, request: &str) -> Result<(), Error> { ++ let request = try!(CString::new(request)); ++ self.setopt_str(curl_sys::CURLOPT_CUSTOMREQUEST, &request) ++ } ++ ++ /// Get the modification time of the remote resource ++ /// ++ /// If true, libcurl will attempt to get the modification time of the ++ /// remote document in this operation. This requires that the remote server ++ /// sends the time or replies to a time querying command. The `filetime` ++ /// function can be used after a transfer to extract the received time (if ++ /// any). ++ /// ++ /// By default this option is `false` and corresponds to `CURLOPT_FILETIME` ++ pub fn fetch_filetime(&mut self, fetch: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_FILETIME, fetch as c_long) ++ } ++ ++ /// Indicate whether to download the request without getting the body ++ /// ++ /// This is useful, for example, for doing a HEAD request. ++ /// ++ /// By default this option is `false` and corresponds to `CURLOPT_NOBODY`. 
++ pub fn nobody(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_NOBODY, enable as c_long) ++ } ++ ++ /// Set the size of the input file to send off. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_INFILESIZE_LARGE`. ++ pub fn in_filesize(&mut self, size: u64) -> Result<(), Error> { ++ self.setopt_off_t(curl_sys::CURLOPT_INFILESIZE_LARGE, ++ size as curl_sys::curl_off_t) ++ } ++ ++ /// Enable or disable data upload. ++ /// ++ /// This means that a PUT request will be made for HTTP and probably wants ++ /// to be combined with the read callback as well as the `in_filesize` ++ /// method. ++ /// ++ /// By default this option is `false` and corresponds to `CURLOPT_UPLOAD`. ++ pub fn upload(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_UPLOAD, enable as c_long) ++ } ++ ++ /// Configure the maximum file size to download. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_MAXFILESIZE_LARGE`. ++ pub fn max_filesize(&mut self, size: u64) -> Result<(), Error> { ++ self.setopt_off_t(curl_sys::CURLOPT_MAXFILESIZE_LARGE, ++ size as curl_sys::curl_off_t) ++ } ++ ++ /// Selects a condition for a time request. ++ /// ++ /// This value indicates how the `time_value` option is interpreted. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_TIMECONDITION`. ++ pub fn time_condition(&mut self, cond: TimeCondition) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_TIMECONDITION, cond as c_long) ++ } ++ ++ /// Sets the time value for a conditional request. ++ /// ++ /// The value here should be the number of seconds elapsed since January 1, ++ /// 1970. To pass how to interpret this value, use `time_condition`. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_TIMEVALUE`. 
++ pub fn time_value(&mut self, val: i64) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_TIMEVALUE, val as c_long) ++ } ++ ++ // ========================================================================= ++ // Connection Options ++ ++ /// Set maximum time the request is allowed to take. ++ /// ++ /// Normally, name lookups can take a considerable time and limiting ++ /// operations to less than a few minutes risk aborting perfectly normal ++ /// operations. ++ /// ++ /// If libcurl is built to use the standard system name resolver, that ++ /// portion of the transfer will still use full-second resolution for ++ /// timeouts with a minimum timeout allowed of one second. ++ /// ++ /// In unix-like systems, this might cause signals to be used unless ++ /// `nosignal` is set. ++ /// ++ /// Since this puts a hard limit for how long a request is allowed to ++ /// take, it has limited use in dynamic use cases with varying transfer ++ /// times. You are then advised to explore `low_speed_limit`, ++ /// `low_speed_time` or using `progress_function` to implement your own ++ /// timeout logic. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_TIMEOUT_MS`. ++ pub fn timeout(&mut self, timeout: Duration) -> Result<(), Error> { ++ // TODO: checked arithmetic and casts ++ // TODO: use CURLOPT_TIMEOUT if the timeout is too great ++ let ms = timeout.as_secs() * 1000 + ++ (timeout.subsec_nanos() / 1_000_000) as u64; ++ self.setopt_long(curl_sys::CURLOPT_TIMEOUT_MS, ms as c_long) ++ ++ } ++ ++ /// Set the low speed limit in bytes per second. ++ /// ++ /// This specifies the average transfer speed in bytes per second that the ++ /// transfer should be below during `low_speed_time` for libcurl to consider ++ /// it to be too slow and abort. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_LOW_SPEED_LIMIT`. 
++ pub fn low_speed_limit(&mut self, limit: u32) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_LOW_SPEED_LIMIT, limit as c_long) ++ } ++ ++ /// Set the low speed time period. ++ /// ++ /// Specifies the window of time for which if the transfer rate is below ++ /// `low_speed_limit` the request will be aborted. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_LOW_SPEED_TIME`. ++ pub fn low_speed_time(&mut self, dur: Duration) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_LOW_SPEED_TIME, ++ dur.as_secs() as c_long) ++ } ++ ++ /// Rate limit data upload speed ++ /// ++ /// If an upload exceeds this speed (counted in bytes per second) on ++ /// cumulative average during the transfer, the transfer will pause to keep ++ /// the average rate less than or equal to the parameter value. ++ /// ++ /// By default this option is not set (unlimited speed) and corresponds to ++ /// `CURLOPT_MAX_SEND_SPEED_LARGE`. ++ pub fn max_send_speed(&mut self, speed: u64) -> Result<(), Error> { ++ self.setopt_off_t(curl_sys::CURLOPT_MAX_SEND_SPEED_LARGE, ++ speed as curl_sys::curl_off_t) ++ } ++ ++ /// Rate limit data download speed ++ /// ++ /// If a download exceeds this speed (counted in bytes per second) on ++ /// cumulative average during the transfer, the transfer will pause to keep ++ /// the average rate less than or equal to the parameter value. ++ /// ++ /// By default this option is not set (unlimited speed) and corresponds to ++ /// `CURLOPT_MAX_RECV_SPEED_LARGE`. ++ pub fn max_recv_speed(&mut self, speed: u64) -> Result<(), Error> { ++ self.setopt_off_t(curl_sys::CURLOPT_MAX_RECV_SPEED_LARGE, ++ speed as curl_sys::curl_off_t) ++ } ++ ++ /// Set the maximum connection cache size. ++ /// ++ /// The set amount will be the maximum number of simultaneously open ++ /// persistent connections that libcurl may cache in the pool associated ++ /// with this handle. 
The default is 5, and there isn't much point in ++ /// changing this value unless you are perfectly aware of how this works and ++ /// changes libcurl's behaviour. This concerns connections using any of the ++ /// protocols that support persistent connections. ++ /// ++ /// When reaching the maximum limit, curl closes the oldest one in the cache ++ /// to prevent increasing the number of open connections. ++ /// ++ /// By default this option is set to 5 and corresponds to ++ /// `CURLOPT_MAXCONNECTS` ++ pub fn max_connects(&mut self, max: u32) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_MAXCONNECTS, max as c_long) ++ } ++ ++ /// Force a new connection to be used. ++ /// ++ /// Makes the next transfer use a new (fresh) connection by force instead of ++ /// trying to re-use an existing one. This option should be used with ++ /// caution and only if you understand what it does as it may seriously ++ /// impact performance. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_FRESH_CONNECT`. ++ pub fn fresh_connect(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_FRESH_CONNECT, enable as c_long) ++ } ++ ++ /// Make connection get closed at once after use. ++ /// ++ /// Makes libcurl explicitly close the connection when done with the ++ /// transfer. Normally, libcurl keeps all connections alive when done with ++ /// one transfer in case a succeeding one follows that can re-use them. ++ /// This option should be used with caution and only if you understand what ++ /// it does as it can seriously impact performance. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_FORBID_REUSE`. 
++ pub fn forbid_reuse(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_FORBID_REUSE, enable as c_long) ++ } ++ ++ /// Timeout for the connect phase ++ /// ++ /// This is the maximum time that you allow the connection phase to the ++ /// server to take. This only limits the connection phase, it has no impact ++ /// once it has connected. ++ /// ++ /// By default this value is 300 seconds and corresponds to ++ /// `CURLOPT_CONNECTTIMEOUT_MS`. ++ pub fn connect_timeout(&mut self, timeout: Duration) -> Result<(), Error> { ++ let ms = timeout.as_secs() * 1000 + ++ (timeout.subsec_nanos() / 1_000_000) as u64; ++ self.setopt_long(curl_sys::CURLOPT_CONNECTTIMEOUT_MS, ms as c_long) ++ } ++ ++ /// Specify which IP protocol version to use ++ /// ++ /// Allows an application to select what kind of IP addresses to use when ++ /// resolving host names. This is only interesting when using host names ++ /// that resolve addresses using more than one version of IP. ++ /// ++ /// By default this value is "any" and corresponds to `CURLOPT_IPRESOLVE`. ++ pub fn ip_resolve(&mut self, resolve: IpResolve) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_IPRESOLVE, resolve as c_long) ++ } ++ ++ /// Configure whether to stop when connected to target server ++ /// ++ /// When enabled it tells the library to perform all the required proxy ++ /// authentication and connection setup, but no data transfer, and then ++ /// return. ++ /// ++ /// The option can be used to simply test a connection to a server. ++ /// ++ /// By default this value is `false` and corresponds to ++ /// `CURLOPT_CONNECT_ONLY`. ++ pub fn connect_only(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_CONNECT_ONLY, enable as c_long) ++ } ++ ++ // /// Set interface to speak DNS over. ++ // /// ++ // /// Set the name of the network interface that the DNS resolver should bind ++ // /// to. This must be an interface name (not an address). 
++ // /// ++ // /// By default this option is not set and corresponds to ++ // /// `CURLOPT_DNS_INTERFACE`. ++ // pub fn dns_interface(&mut self, interface: &str) -> Result<(), Error> { ++ // let interface = try!(CString::new(interface)); ++ // self.setopt_str(curl_sys::CURLOPT_DNS_INTERFACE, &interface) ++ // } ++ // ++ // /// IPv4 address to bind DNS resolves to ++ // /// ++ // /// Set the local IPv4 address that the resolver should bind to. The ++ // /// argument should be of type char * and contain a single numerical IPv4 ++ // /// address as a string. ++ // /// ++ // /// By default this option is not set and corresponds to ++ // /// `CURLOPT_DNS_LOCAL_IP4`. ++ // pub fn dns_local_ip4(&mut self, ip: &str) -> Result<(), Error> { ++ // let ip = try!(CString::new(ip)); ++ // self.setopt_str(curl_sys::CURLOPT_DNS_LOCAL_IP4, &ip) ++ // } ++ // ++ // /// IPv6 address to bind DNS resolves to ++ // /// ++ // /// Set the local IPv6 address that the resolver should bind to. The ++ // /// argument should be of type char * and contain a single numerical IPv6 ++ // /// address as a string. ++ // /// ++ // /// By default this option is not set and corresponds to ++ // /// `CURLOPT_DNS_LOCAL_IP6`. ++ // pub fn dns_local_ip6(&mut self, ip: &str) -> Result<(), Error> { ++ // let ip = try!(CString::new(ip)); ++ // self.setopt_str(curl_sys::CURLOPT_DNS_LOCAL_IP6, &ip) ++ // } ++ // ++ // /// Set preferred DNS servers. ++ // /// ++ // /// Provides a list of DNS servers to be used instead of the system default. ++ // /// The format of the dns servers option is: ++ // /// ++ // /// ```text ++ // /// host[:port],[host[:port]]... ++ // /// ``` ++ // /// ++ // /// By default this option is not set and corresponds to ++ // /// `CURLOPT_DNS_SERVERS`. 
++ // pub fn dns_servers(&mut self, servers: &str) -> Result<(), Error> { ++ // let servers = try!(CString::new(servers)); ++ // self.setopt_str(curl_sys::CURLOPT_DNS_SERVERS, &servers) ++ // } ++ ++ // ========================================================================= ++ // SSL/Security Options ++ ++ /// Sets the SSL client certificate. ++ /// ++ /// The string should be the file name of your client certificate. The ++ /// default format is "P12" on Secure Transport and "PEM" on other engines, ++ /// and can be changed with `ssl_cert_type`. ++ /// ++ /// With NSS or Secure Transport, this can also be the nickname of the ++ /// certificate you wish to authenticate with as it is named in the security ++ /// database. If you want to use a file from the current directory, please ++ /// precede it with "./" prefix, in order to avoid confusion with a ++ /// nickname. ++ /// ++ /// When using a client certificate, you most likely also need to provide a ++ /// private key with `ssl_key`. ++ /// ++ /// By default this option is not set and corresponds to `CURLOPT_SSLCERT`. ++ pub fn ssl_cert>(&mut self, cert: P) -> Result<(), Error> { ++ self.setopt_path(curl_sys::CURLOPT_SSLCERT, cert.as_ref()) ++ } ++ ++ /// Specify type of the client SSL certificate. ++ /// ++ /// The string should be the format of your certificate. Supported formats ++ /// are "PEM" and "DER", except with Secure Transport. OpenSSL (versions ++ /// 0.9.3 and later) and Secure Transport (on iOS 5 or later, or OS X 10.7 ++ /// or later) also support "P12" for PKCS#12-encoded files. ++ /// ++ /// By default this option is "PEM" and corresponds to ++ /// `CURLOPT_SSLCERTTYPE`. ++ pub fn ssl_cert_type(&mut self, kind: &str) -> Result<(), Error> { ++ let kind = try!(CString::new(kind)); ++ self.setopt_str(curl_sys::CURLOPT_SSLCERTTYPE, &kind) ++ } ++ ++ /// Specify private keyfile for TLS and SSL client cert. ++ /// ++ /// The string should be the file name of your private key. 
The default ++ /// format is "PEM" and can be changed with `ssl_key_type`. ++ /// ++ /// (iOS and Mac OS X only) This option is ignored if curl was built against ++ /// Secure Transport. Secure Transport expects the private key to be already ++ /// present in the keychain or PKCS#12 file containing the certificate. ++ /// ++ /// By default this option is not set and corresponds to `CURLOPT_SSLKEY`. ++ pub fn ssl_key>(&mut self, key: P) -> Result<(), Error> { ++ self.setopt_path(curl_sys::CURLOPT_SSLKEY, key.as_ref()) ++ } ++ ++ /// Set type of the private key file. ++ /// ++ /// The string should be the format of your private key. Supported formats ++ /// are "PEM", "DER" and "ENG". ++ /// ++ /// The format "ENG" enables you to load the private key from a crypto ++ /// engine. In this case `ssl_key` is used as an identifier passed to ++ /// the engine. You have to set the crypto engine with `ssl_engine`. ++ /// "DER" format key file currently does not work because of a bug in ++ /// OpenSSL. ++ /// ++ /// By default this option is "PEM" and corresponds to ++ /// `CURLOPT_SSLKEYTYPE`. ++ pub fn ssl_key_type(&mut self, kind: &str) -> Result<(), Error> { ++ let kind = try!(CString::new(kind)); ++ self.setopt_str(curl_sys::CURLOPT_SSLKEYTYPE, &kind) ++ } ++ ++ /// Set passphrase to private key. ++ /// ++ /// This will be used as the password required to use the `ssl_key`. ++ /// You never needed a pass phrase to load a certificate but you need one to ++ /// load your private key. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_KEYPASSWD`. ++ pub fn key_password(&mut self, password: &str) -> Result<(), Error> { ++ let password = try!(CString::new(password)); ++ self.setopt_str(curl_sys::CURLOPT_KEYPASSWD, &password) ++ } ++ ++ /// Set the SSL engine identifier. ++ /// ++ /// This will be used as the identifier for the crypto engine you want to ++ /// use for your private key. 
++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_SSLENGINE`. ++ pub fn ssl_engine(&mut self, engine: &str) -> Result<(), Error> { ++ let engine = try!(CString::new(engine)); ++ self.setopt_str(curl_sys::CURLOPT_SSLENGINE, &engine) ++ } ++ ++ /// Make this handle's SSL engine the default. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_SSLENGINE_DEFAULT`. ++ pub fn ssl_engine_default(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_SSLENGINE_DEFAULT, enable as c_long) ++ } ++ ++ // /// Enable TLS false start. ++ // /// ++ // /// This option determines whether libcurl should use false start during the ++ // /// TLS handshake. False start is a mode where a TLS client will start ++ // /// sending application data before verifying the server's Finished message, ++ // /// thus saving a round trip when performing a full handshake. ++ // /// ++ // /// By default this option is not set and corresponds to ++ // /// `CURLOPT_SSL_FALSESTARTE`. ++ // pub fn ssl_false_start(&mut self, enable: bool) -> Result<(), Error> { ++ // self.setopt_long(curl_sys::CURLOPT_SSLENGINE_DEFAULT, enable as c_long) ++ // } ++ ++ /// Set preferred HTTP version. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_HTTP_VERSION`. ++ pub fn http_version(&mut self, version: HttpVersion) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_HTTP_VERSION, version as c_long) ++ } ++ ++ /// Set preferred TLS/SSL version. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_SSLVERSION`. ++ pub fn ssl_version(&mut self, version: SslVersion) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_SSLVERSION, version as c_long) ++ } ++ ++ /// Verify the certificate's name against host. ++ /// ++ /// This should be disabled with great caution! It basically disables the ++ /// security features of SSL if it is disabled. 
++ /// ++ /// By default this option is set to `true` and corresponds to ++ /// `CURLOPT_SSL_VERIFYHOST`. ++ pub fn ssl_verify_host(&mut self, verify: bool) -> Result<(), Error> { ++ let val = if verify {2} else {0}; ++ self.setopt_long(curl_sys::CURLOPT_SSL_VERIFYHOST, val) ++ } ++ ++ /// Verify the peer's SSL certificate. ++ /// ++ /// This should be disabled with great caution! It basically disables the ++ /// security features of SSL if it is disabled. ++ /// ++ /// By default this option is set to `true` and corresponds to ++ /// `CURLOPT_SSL_VERIFYPEER`. ++ pub fn ssl_verify_peer(&mut self, verify: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_SSL_VERIFYPEER, verify as c_long) ++ } ++ ++ // /// Verify the certificate's status. ++ // /// ++ // /// This option determines whether libcurl verifies the status of the server ++ // /// cert using the "Certificate Status Request" TLS extension (aka. OCSP ++ // /// stapling). ++ // /// ++ // /// By default this option is set to `false` and corresponds to ++ // /// `CURLOPT_SSL_VERIFYSTATUS`. ++ // pub fn ssl_verify_status(&mut self, verify: bool) -> Result<(), Error> { ++ // self.setopt_long(curl_sys::CURLOPT_SSL_VERIFYSTATUS, verify as c_long) ++ // } ++ ++ /// Specify the path to Certificate Authority (CA) bundle ++ /// ++ /// The file referenced should hold one or more certificates to verify the ++ /// peer with. ++ /// ++ /// This option is by default set to the system path where libcurl's cacert ++ /// bundle is assumed to be stored, as established at build time. ++ /// ++ /// If curl is built against the NSS SSL library, the NSS PEM PKCS#11 module ++ /// (libnsspem.so) needs to be available for this option to work properly. ++ /// ++ /// By default this option is the system defaults, and corresponds to ++ /// `CURLOPT_CAINFO`. 
++ pub fn cainfo>(&mut self, path: P) -> Result<(), Error> { ++ self.setopt_path(curl_sys::CURLOPT_CAINFO, path.as_ref()) ++ } ++ ++ /// Set the issuer SSL certificate filename ++ /// ++ /// Specifies a file holding a CA certificate in PEM format. If the option ++ /// is set, an additional check against the peer certificate is performed to ++ /// verify the issuer is indeed the one associated with the certificate ++ /// provided by the option. This additional check is useful in multi-level ++ /// PKI where one needs to enforce that the peer certificate is from a ++ /// specific branch of the tree. ++ /// ++ /// This option makes sense only when used in combination with the ++ /// `ssl_verify_peer` option. Otherwise, the result of the check is not ++ /// considered as failure. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_ISSUERCERT`. ++ pub fn issuer_cert>(&mut self, path: P) -> Result<(), Error> { ++ self.setopt_path(curl_sys::CURLOPT_ISSUERCERT, path.as_ref()) ++ } ++ ++ /// Specify directory holding CA certificates ++ /// ++ /// Names a directory holding multiple CA certificates to verify the peer ++ /// with. If libcurl is built against OpenSSL, the certificate directory ++ /// must be prepared using the openssl c_rehash utility. This makes sense ++ /// only when used in combination with the `ssl_verify_peer` option. ++ /// ++ /// By default this option is not set and corresponds to `CURLOPT_CAPATH`. ++ pub fn capath>(&mut self, path: P) -> Result<(), Error> { ++ self.setopt_path(curl_sys::CURLOPT_CAPATH, path.as_ref()) ++ } ++ ++ /// Specify a Certificate Revocation List file ++ /// ++ /// Names a file with the concatenation of CRL (in PEM format) to use in the ++ /// certificate validation that occurs during the SSL exchange. ++ /// ++ /// When curl is built to use NSS or GnuTLS, there is no way to influence ++ /// the use of CRL passed to help in the verification process. 
When libcurl ++ /// is built with OpenSSL support, X509_V_FLAG_CRL_CHECK and ++ /// X509_V_FLAG_CRL_CHECK_ALL are both set, requiring CRL check against all ++ /// the elements of the certificate chain if a CRL file is passed. ++ /// ++ /// This option makes sense only when used in combination with the ++ /// `ssl_verify_peer` option. ++ /// ++ /// A specific error code (`is_ssl_crl_badfile`) is defined with the ++ /// option. It is returned when the SSL exchange fails because the CRL file ++ /// cannot be loaded. A failure in certificate verification due to a ++ /// revocation information found in the CRL does not trigger this specific ++ /// error. ++ /// ++ /// By default this option is not set and corresponds to `CURLOPT_CRLFILE`. ++ pub fn crlfile>(&mut self, path: P) -> Result<(), Error> { ++ self.setopt_path(curl_sys::CURLOPT_CRLFILE, path.as_ref()) ++ } ++ ++ /// Request SSL certificate information ++ /// ++ /// Enable libcurl's certificate chain info gatherer. With this enabled, ++ /// libcurl will extract lots of information and data about the certificates ++ /// in the certificate chain used in the SSL connection. ++ /// ++ /// By default this option is `false` and corresponds to ++ /// `CURLOPT_CERTINFO`. ++ pub fn certinfo(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_CERTINFO, enable as c_long) ++ } ++ ++ // /// Set pinned public key. ++ // /// ++ // /// Pass a pointer to a zero terminated string as parameter. The string can ++ // /// be the file name of your pinned public key. The file format expected is ++ // /// "PEM" or "DER". The string can also be any number of base64 encoded ++ // /// sha256 hashes preceded by "sha256//" and separated by ";" ++ // /// ++ // /// When negotiating a TLS or SSL connection, the server sends a certificate ++ // /// indicating its identity. 
A public key is extracted from this certificate ++ // /// and if it does not exactly match the public key provided to this option, ++ // /// curl will abort the connection before sending or receiving any data. ++ // /// ++ // /// By default this option is not set and corresponds to ++ // /// `CURLOPT_PINNEDPUBLICKEY`. ++ // pub fn pinned_public_key(&mut self, enable: bool) -> Result<(), Error> { ++ // self.setopt_long(curl_sys::CURLOPT_CERTINFO, enable as c_long) ++ // } ++ ++ /// Specify a source for random data ++ /// ++ /// The file will be used to read from to seed the random engine for SSL and ++ /// more. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_RANDOM_FILE`. ++ pub fn random_file>(&mut self, p: P) -> Result<(), Error> { ++ self.setopt_path(curl_sys::CURLOPT_RANDOM_FILE, p.as_ref()) ++ } ++ ++ /// Specify EGD socket path. ++ /// ++ /// Indicates the path name to the Entropy Gathering Daemon socket. It will ++ /// be used to seed the random engine for SSL. ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_EGDSOCKET`. ++ pub fn egd_socket>(&mut self, p: P) -> Result<(), Error> { ++ self.setopt_path(curl_sys::CURLOPT_EGDSOCKET, p.as_ref()) ++ } ++ ++ /// Specify ciphers to use for TLS. ++ /// ++ /// Holds the list of ciphers to use for the SSL connection. The list must ++ /// be syntactically correct, it consists of one or more cipher strings ++ /// separated by colons. Commas or spaces are also acceptable separators ++ /// but colons are normally used, !, - and + can be used as operators. ++ /// ++ /// For OpenSSL and GnuTLS valid examples of cipher lists include 'RC4-SHA', ++ /// ´SHA1+DES´, 'TLSv1' and 'DEFAULT'. The default list is normally set when ++ /// you compile OpenSSL. 
++ /// ++ /// You'll find more details about cipher lists on this URL: ++ /// ++ /// https://www.openssl.org/docs/apps/ciphers.html ++ /// ++ /// For NSS, valid examples of cipher lists include 'rsa_rc4_128_md5', ++ /// ´rsa_aes_128_sha´, etc. With NSS you don't add/remove ciphers. If one ++ /// uses this option then all known ciphers are disabled and only those ++ /// passed in are enabled. ++ /// ++ /// You'll find more details about the NSS cipher lists on this URL: ++ /// ++ /// http://git.fedorahosted.org/cgit/mod_nss.git/plain/docs/mod_nss.html#Directives ++ /// ++ /// By default this option is not set and corresponds to ++ /// `CURLOPT_SSL_CIPHER_LIST`. ++ pub fn ssl_cipher_list(&mut self, ciphers: &str) -> Result<(), Error> { ++ let ciphers = try!(CString::new(ciphers)); ++ self.setopt_str(curl_sys::CURLOPT_SSL_CIPHER_LIST, &ciphers) ++ } ++ ++ /// Enable or disable use of the SSL session-ID cache ++ /// ++ /// By default all transfers are done using the cache enabled. While nothing ++ /// ever should get hurt by attempting to reuse SSL session-IDs, there seem ++ /// to be or have been broken SSL implementations in the wild that may ++ /// require you to disable this in order for you to succeed. ++ /// ++ /// This corresponds to the `CURLOPT_SSL_SESSIONID_CACHE` option. ++ pub fn ssl_sessionid_cache(&mut self, enable: bool) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_SSL_SESSIONID_CACHE, ++ enable as c_long) ++ } ++ ++ /// Set SSL behavior options ++ /// ++ /// Inform libcurl about SSL specific behaviors. ++ /// ++ /// This corresponds to the `CURLOPT_SSL_OPTIONS` option. ++ pub fn ssl_options(&mut self, bits: &SslOpt) -> Result<(), Error> { ++ self.setopt_long(curl_sys::CURLOPT_SSL_OPTIONS, bits.bits) ++ } ++ ++ // /// Set SSL behavior options for proxies ++ // /// ++ // /// Inform libcurl about SSL specific behaviors. ++ // /// ++ // /// This corresponds to the `CURLOPT_PROXY_SSL_OPTIONS` option. 
++ // pub fn proxy_ssl_options(&mut self, bits: &SslOpt) -> Result<(), Error> { ++ // self.setopt_long(curl_sys::CURLOPT_PROXY_SSL_OPTIONS, bits.bits) ++ // } ++ ++ // /// Stores a private pointer-sized piece of data. ++ // /// ++ // /// This can be retrieved through the `private` function and otherwise ++ // /// libcurl does not tamper with this value. This corresponds to ++ // /// `CURLOPT_PRIVATE` and defaults to 0. ++ // pub fn set_private(&mut self, private: usize) -> Result<(), Error> { ++ // self.setopt_ptr(curl_sys::CURLOPT_PRIVATE, private as *const _) ++ // } ++ // ++ // /// Fetches this handle's private pointer-sized piece of data. ++ // /// ++ // /// This corresponds to `CURLINFO_PRIVATE` and defaults to 0. ++ // pub fn private(&mut self) -> Result { ++ // self.getopt_ptr(curl_sys::CURLINFO_PRIVATE).map(|p| p as usize) ++ // } ++ ++ // ========================================================================= ++ // getters ++ ++ /// Get the last used URL ++ /// ++ /// In cases when you've asked libcurl to follow redirects, it may ++ /// not be the same value you set with `url`. ++ /// ++ /// This methods corresponds to the `CURLINFO_EFFECTIVE_URL` option. ++ /// ++ /// Returns `Ok(None)` if no effective url is listed or `Err` if an error ++ /// happens or the underlying bytes aren't valid utf-8. ++ pub fn effective_url(&mut self) -> Result, Error> { ++ self.getopt_str(curl_sys::CURLINFO_EFFECTIVE_URL) ++ } ++ ++ /// Get the last used URL, in bytes ++ /// ++ /// In cases when you've asked libcurl to follow redirects, it may ++ /// not be the same value you set with `url`. ++ /// ++ /// This methods corresponds to the `CURLINFO_EFFECTIVE_URL` option. ++ /// ++ /// Returns `Ok(None)` if no effective url is listed or `Err` if an error ++ /// happens or the underlying bytes aren't valid utf-8. 
++ pub fn effective_url_bytes(&mut self) -> Result, Error> { ++ self.getopt_bytes(curl_sys::CURLINFO_EFFECTIVE_URL) ++ } ++ ++ /// Get the last response code ++ /// ++ /// The stored value will be zero if no server response code has been ++ /// received. Note that a proxy's CONNECT response should be read with ++ /// `http_connectcode` and not this. ++ /// ++ /// Corresponds to `CURLINFO_RESPONSE_CODE` and returns an error if this ++ /// option is not supported. ++ pub fn response_code(&mut self) -> Result { ++ self.getopt_long(curl_sys::CURLINFO_RESPONSE_CODE).map(|c| c as u32) ++ } ++ ++ /// Get the CONNECT response code ++ /// ++ /// Returns the last received HTTP proxy response code to a CONNECT request. ++ /// The returned value will be zero if no such response code was available. ++ /// ++ /// Corresponds to `CURLINFO_HTTP_CONNECTCODE` and returns an error if this ++ /// option is not supported. ++ pub fn http_connectcode(&mut self) -> Result { ++ self.getopt_long(curl_sys::CURLINFO_HTTP_CONNECTCODE).map(|c| c as u32) ++ } ++ ++ /// Get the remote time of the retrieved document ++ /// ++ /// Returns the remote time of the retrieved document (in number of seconds ++ /// since 1 Jan 1970 in the GMT/UTC time zone). If you get `None`, it can be ++ /// because of many reasons (it might be unknown, the server might hide it ++ /// or the server doesn't support the command that tells document time etc) ++ /// and the time of the document is unknown. ++ /// ++ /// Note that you must tell the server to collect this information before ++ /// the transfer is made, by using the `filetime` method to ++ /// or you will unconditionally get a `None` back. 
++ /// ++ /// This corresponds to `CURLINFO_FILETIME` and may return an error if the ++ /// option is not supported ++ pub fn filetime(&mut self) -> Result, Error> { ++ self.getopt_long(curl_sys::CURLINFO_FILETIME).map(|r| { ++ if r == -1 { ++ None ++ } else { ++ Some(r as i64) ++ } ++ }) ++ } ++ ++ /// Get total time of previous transfer ++ /// ++ /// Returns the total time for the previous transfer, ++ /// including name resolving, TCP connect etc. ++ /// ++ /// Corresponds to `CURLINFO_TOTAL_TIME` and may return an error if the ++ /// option isn't supported. ++ pub fn total_time(&mut self) -> Result { ++ self.getopt_double(curl_sys::CURLINFO_TOTAL_TIME) ++ .map(double_seconds_to_duration) ++ } ++ ++ /// Get the name lookup time ++ /// ++ /// Returns the total time from the start ++ /// until the name resolving was completed. ++ /// ++ /// Corresponds to `CURLINFO_NAMELOOKUP_TIME` and may return an error if the ++ /// option isn't supported. ++ pub fn namelookup_time(&mut self) -> Result { ++ self.getopt_double(curl_sys::CURLINFO_NAMELOOKUP_TIME) ++ .map(double_seconds_to_duration) ++ } ++ ++ /// Get the time until connect ++ /// ++ /// Returns the total time from the start ++ /// until the connection to the remote host (or proxy) was completed. ++ /// ++ /// Corresponds to `CURLINFO_CONNECT_TIME` and may return an error if the ++ /// option isn't supported. ++ pub fn connect_time(&mut self) -> Result { ++ self.getopt_double(curl_sys::CURLINFO_CONNECT_TIME) ++ .map(double_seconds_to_duration) ++ } ++ ++ /// Get the time until the SSL/SSH handshake is completed ++ /// ++ /// Returns the total time it took from the start until the SSL/SSH ++ /// connect/handshake to the remote host was completed. This time is most often ++ /// very near to the `pretransfer_time` time, except for cases such as ++ /// HTTP pipelining where the pretransfer time can be delayed due to waits in ++ /// line for the pipeline and more. 
++ /// ++ /// Corresponds to `CURLINFO_APPCONNECT_TIME` and may return an error if the ++ /// option isn't supported. ++ pub fn appconnect_time(&mut self) -> Result { ++ self.getopt_double(curl_sys::CURLINFO_APPCONNECT_TIME) ++ .map(double_seconds_to_duration) ++ } ++ ++ /// Get the time until the file transfer start ++ /// ++ /// Returns the total time it took from the start until the file ++ /// transfer is just about to begin. This includes all pre-transfer commands ++ /// and negotiations that are specific to the particular protocol(s) involved. ++ /// It does not involve the sending of the protocol- specific request that ++ /// triggers a transfer. ++ /// ++ /// Corresponds to `CURLINFO_PRETRANSFER_TIME` and may return an error if the ++ /// option isn't supported. ++ pub fn pretransfer_time(&mut self) -> Result { ++ self.getopt_double(curl_sys::CURLINFO_PRETRANSFER_TIME) ++ .map(double_seconds_to_duration) ++ } ++ ++ /// Get the time until the first byte is received ++ /// ++ /// Returns the total time it took from the start until the first ++ /// byte is received by libcurl. This includes `pretransfer_time` and ++ /// also the time the server needs to calculate the result. ++ /// ++ /// Corresponds to `CURLINFO_STARTTRANSFER_TIME` and may return an error if the ++ /// option isn't supported. ++ pub fn starttransfer_time(&mut self) -> Result { ++ self.getopt_double(curl_sys::CURLINFO_STARTTRANSFER_TIME) ++ .map(double_seconds_to_duration) ++ } ++ ++ /// Get the time for all redirection steps ++ /// ++ /// Returns the total time it took for all redirection steps ++ /// include name lookup, connect, pretransfer and transfer before final ++ /// transaction was started. `redirect_time` contains the complete ++ /// execution time for multiple redirections. ++ /// ++ /// Corresponds to `CURLINFO_REDIRECT_TIME` and may return an error if the ++ /// option isn't supported. 
++ pub fn redirect_time(&mut self) -> Result { ++ self.getopt_double(curl_sys::CURLINFO_REDIRECT_TIME) ++ .map(double_seconds_to_duration) ++ } ++ ++ /// Get the number of redirects ++ /// ++ /// Corresponds to `CURLINFO_REDIRECT_COUNT` and may return an error if the ++ /// option isn't supported. ++ pub fn redirect_count(&mut self) -> Result { ++ self.getopt_long(curl_sys::CURLINFO_REDIRECT_COUNT).map(|c| c as u32) ++ } ++ ++ /// Get the URL a redirect would go to ++ /// ++ /// Returns the URL a redirect would take you to if you would enable ++ /// `follow_location`. This can come very handy if you think using the ++ /// built-in libcurl redirect logic isn't good enough for you but you would ++ /// still prefer to avoid implementing all the magic of figuring out the new ++ /// URL. ++ /// ++ /// Corresponds to `CURLINFO_REDIRECT_URL` and may return an error if the ++ /// url isn't valid utf-8 or an error happens. ++ pub fn redirect_url(&mut self) -> Result, Error> { ++ self.getopt_str(curl_sys::CURLINFO_REDIRECT_URL) ++ } ++ ++ /// Get the URL a redirect would go to, in bytes ++ /// ++ /// Returns the URL a redirect would take you to if you would enable ++ /// `follow_location`. This can come very handy if you think using the ++ /// built-in libcurl redirect logic isn't good enough for you but you would ++ /// still prefer to avoid implementing all the magic of figuring out the new ++ /// URL. ++ /// ++ /// Corresponds to `CURLINFO_REDIRECT_URL` and may return an error. ++ pub fn redirect_url_bytes(&mut self) -> Result, Error> { ++ self.getopt_bytes(curl_sys::CURLINFO_REDIRECT_URL) ++ } ++ ++ /// Get size of retrieved headers ++ /// ++ /// Corresponds to `CURLINFO_HEADER_SIZE` and may return an error if the ++ /// option isn't supported. ++ pub fn header_size(&mut self) -> Result { ++ self.getopt_long(curl_sys::CURLINFO_HEADER_SIZE).map(|c| c as u64) ++ } ++ ++ /// Get size of sent request. 
++ /// ++ /// Corresponds to `CURLINFO_REQUEST_SIZE` and may return an error if the ++ /// option isn't supported. ++ pub fn request_size(&mut self) -> Result { ++ self.getopt_long(curl_sys::CURLINFO_REQUEST_SIZE).map(|c| c as u64) ++ } ++ ++ /// Get Content-Type ++ /// ++ /// Returns the content-type of the downloaded object. This is the value ++ /// read from the Content-Type: field. If you get `None`, it means that the ++ /// server didn't send a valid Content-Type header or that the protocol ++ /// used doesn't support this. ++ /// ++ /// Corresponds to `CURLINFO_CONTENT_TYPE` and may return an error if the ++ /// option isn't supported. ++ pub fn content_type(&mut self) -> Result, Error> { ++ self.getopt_str(curl_sys::CURLINFO_CONTENT_TYPE) ++ } ++ ++ /// Get Content-Type, in bytes ++ /// ++ /// Returns the content-type of the downloaded object. This is the value ++ /// read from the Content-Type: field. If you get `None`, it means that the ++ /// server didn't send a valid Content-Type header or that the protocol ++ /// used doesn't support this. ++ /// ++ /// Corresponds to `CURLINFO_CONTENT_TYPE` and may return an error if the ++ /// option isn't supported. ++ pub fn content_type_bytes(&mut self) -> Result, Error> { ++ self.getopt_bytes(curl_sys::CURLINFO_CONTENT_TYPE) ++ } ++ ++ /// Get errno number from last connect failure. ++ /// ++ /// Note that the value is only set on failure, it is not reset upon a ++ /// successful operation. The number is OS and system specific. ++ /// ++ /// Corresponds to `CURLINFO_OS_ERRNO` and may return an error if the ++ /// option isn't supported. ++ pub fn os_errno(&mut self) -> Result { ++ self.getopt_long(curl_sys::CURLINFO_OS_ERRNO).map(|c| c as i32) ++ } ++ ++ /// Get IP address of last connection. ++ /// ++ /// Returns a string holding the IP address of the most recent connection ++ /// done with this curl handle. This string may be IPv6 when that is ++ /// enabled. 
++ /// ++ /// Corresponds to `CURLINFO_PRIMARY_IP` and may return an error if the ++ /// option isn't supported. ++ pub fn primary_ip(&mut self) -> Result, Error> { ++ self.getopt_str(curl_sys::CURLINFO_PRIMARY_IP) ++ } ++ ++ /// Get the latest destination port number ++ /// ++ /// Corresponds to `CURLINFO_PRIMARY_PORT` and may return an error if the ++ /// option isn't supported. ++ pub fn primary_port(&mut self) -> Result { ++ self.getopt_long(curl_sys::CURLINFO_PRIMARY_PORT).map(|c| c as u16) ++ } ++ ++ /// Get local IP address of last connection ++ /// ++ /// Returns a string holding the IP address of the local end of most recent ++ /// connection done with this curl handle. This string may be IPv6 when that ++ /// is enabled. ++ /// ++ /// Corresponds to `CURLINFO_LOCAL_IP` and may return an error if the ++ /// option isn't supported. ++ pub fn local_ip(&mut self) -> Result, Error> { ++ self.getopt_str(curl_sys::CURLINFO_LOCAL_IP) ++ } ++ ++ /// Get the latest local port number ++ /// ++ /// Corresponds to `CURLINFO_LOCAL_PORT` and may return an error if the ++ /// option isn't supported. ++ pub fn local_port(&mut self) -> Result { ++ self.getopt_long(curl_sys::CURLINFO_LOCAL_PORT).map(|c| c as u16) ++ } ++ ++ /// Get all known cookies ++ /// ++ /// Returns a linked-list of all cookies cURL knows (expired ones, too). ++ /// ++ /// Corresponds to the `CURLINFO_COOKIELIST` option and may return an error ++ /// if the option isn't supported. ++ pub fn cookies(&mut self) -> Result { ++ unsafe { ++ let mut list = 0 as *mut _; ++ let rc = curl_sys::curl_easy_getinfo(self.inner.handle, ++ curl_sys::CURLINFO_COOKIELIST, ++ &mut list); ++ try!(self.cvt(rc)); ++ Ok(list::from_raw(list)) ++ } ++ } ++ ++ // ========================================================================= ++ // Other methods ++ ++ /// After options have been set, this will perform the transfer described by ++ /// the options. ++ /// ++ /// This performs the request in a synchronous fashion. 
This can be used ++ /// multiple times for one easy handle and libcurl will attempt to re-use ++ /// the same connection for all transfers. ++ /// ++ /// This method will preserve all options configured in this handle for the ++ /// next request, and if that is not desired then the options can be ++ /// manually reset or the `reset` method can be called. ++ /// ++ /// Note that this method takes `&self`, which is quite important! This ++ /// allows applications to close over the handle in various callbacks to ++ /// call methods like `unpause_write` and `unpause_read` while a transfer is ++ /// in progress. ++ pub fn perform(&self) -> Result<(), Error> { ++ let ret = unsafe { ++ self.cvt(curl_sys::curl_easy_perform(self.inner.handle)) ++ }; ++ panic::propagate(); ++ return ret ++ } ++ ++ /// Unpause reading on a connection. ++ /// ++ /// Using this function, you can explicitly unpause a connection that was ++ /// previously paused. ++ /// ++ /// A connection can be paused by letting the read or the write callbacks ++ /// return `ReadError::Pause` or `WriteError::Pause`. ++ /// ++ /// To unpause, you may for example call this from the progress callback ++ /// which gets called at least once per second, even if the connection is ++ /// paused. ++ /// ++ /// The chance is high that you will get your write callback called before ++ /// this function returns. ++ pub fn unpause_read(&self) -> Result<(), Error> { ++ unsafe { ++ let rc = curl_sys::curl_easy_pause(self.inner.handle, ++ curl_sys::CURLPAUSE_RECV_CONT); ++ self.cvt(rc) ++ } ++ } ++ ++ /// Unpause writing on a connection. ++ /// ++ /// Using this function, you can explicitly unpause a connection that was ++ /// previously paused. ++ /// ++ /// A connection can be paused by letting the read or the write callbacks ++ /// return `ReadError::Pause` or `WriteError::Pause`. 
A write callback that ++ /// returns pause signals to the library that it couldn't take care of any ++ /// data at all, and that data will then be delivered again to the callback ++ /// when the writing is later unpaused. ++ /// ++ /// To unpause, you may for example call this from the progress callback ++ /// which gets called at least once per second, even if the connection is ++ /// paused. ++ pub fn unpause_write(&self) -> Result<(), Error> { ++ unsafe { ++ let rc = curl_sys::curl_easy_pause(self.inner.handle, ++ curl_sys::CURLPAUSE_SEND_CONT); ++ self.cvt(rc) ++ } ++ } ++ ++ /// URL encodes a string `s` ++ pub fn url_encode(&mut self, s: &[u8]) -> String { ++ if s.len() == 0 { ++ return String::new() ++ } ++ unsafe { ++ let p = curl_sys::curl_easy_escape(self.inner.handle, ++ s.as_ptr() as *const _, ++ s.len() as c_int); ++ assert!(!p.is_null()); ++ let ret = str::from_utf8(CStr::from_ptr(p).to_bytes()).unwrap(); ++ let ret = String::from(ret); ++ curl_sys::curl_free(p as *mut _); ++ return ret ++ } ++ } ++ ++ /// URL decodes a string `s`, returning `None` if it fails ++ pub fn url_decode(&mut self, s: &str) -> Vec { ++ if s.len() == 0 { ++ return Vec::new(); ++ } ++ ++ // Work around https://curl.haxx.se/docs/adv_20130622.html, a bug where ++ // if the last few characters are a bad escape then curl will have a ++ // buffer overrun. 
++ let mut iter = s.chars().rev(); ++ let orig_len = s.len(); ++ let mut data; ++ let mut s = s; ++ if iter.next() == Some('%') || ++ iter.next() == Some('%') || ++ iter.next() == Some('%') { ++ data = s.to_string(); ++ data.push(0u8 as char); ++ s = &data[..]; ++ } ++ unsafe { ++ let mut len = 0; ++ let p = curl_sys::curl_easy_unescape(self.inner.handle, ++ s.as_ptr() as *const _, ++ orig_len as c_int, ++ &mut len); ++ assert!(!p.is_null()); ++ let slice = slice::from_raw_parts(p as *const u8, len as usize); ++ let ret = slice.to_vec(); ++ curl_sys::curl_free(p as *mut _); ++ return ret ++ } ++ } ++ ++ // TODO: I don't think this is safe, you can drop this which has all the ++ // callback data and then the next is use-after-free ++ // ++ // /// Attempts to clone this handle, returning a new session handle with the ++ // /// same options set for this handle. ++ // /// ++ // /// Internal state info and things like persistent connections ccannot be ++ // /// transferred. ++ // /// ++ // /// # Errors ++ // /// ++ // /// If a new handle could not be allocated or another error happens, `None` ++ // /// is returned. ++ // pub fn try_clone<'b>(&mut self) -> Option> { ++ // unsafe { ++ // let handle = curl_sys::curl_easy_duphandle(self.handle); ++ // if handle.is_null() { ++ // None ++ // } else { ++ // Some(Easy { ++ // handle: handle, ++ // data: blank_data(), ++ // _marker: marker::PhantomData, ++ // }) ++ // } ++ // } ++ // } ++ ++ /// Receives data from a connected socket. ++ /// ++ /// Only useful after a successful `perform` with the `connect_only` option ++ /// set as well. ++ pub fn recv(&mut self, data: &mut [u8]) -> Result { ++ unsafe { ++ let mut n = 0; ++ let r = curl_sys::curl_easy_recv(self.inner.handle, ++ data.as_mut_ptr() as *mut _, ++ data.len(), ++ &mut n); ++ if r == curl_sys::CURLE_OK { ++ Ok(n) ++ } else { ++ Err(Error::new(r)) ++ } ++ } ++ } ++ ++ /// Sends data over the connected socket. 
++ /// ++ /// Only useful after a successful `perform` with the `connect_only` option ++ /// set as well. ++ pub fn send(&mut self, data: &[u8]) -> Result { ++ unsafe { ++ let mut n = 0; ++ let rc = curl_sys::curl_easy_send(self.inner.handle, ++ data.as_ptr() as *const _, ++ data.len(), ++ &mut n); ++ try!(self.cvt(rc)); ++ Ok(n) ++ } ++ } ++ ++ /// Get a pointer to the raw underlying CURL handle. ++ pub fn raw(&self) -> *mut curl_sys::CURL { ++ self.inner.handle ++ } ++ ++ #[cfg(unix)] ++ fn setopt_path(&mut self, ++ opt: curl_sys::CURLoption, ++ val: &Path) -> Result<(), Error> { ++ use std::os::unix::prelude::*; ++ let s = try!(CString::new(val.as_os_str().as_bytes())); ++ self.setopt_str(opt, &s) ++ } ++ ++ #[cfg(windows)] ++ fn setopt_path(&mut self, ++ opt: curl_sys::CURLoption, ++ val: &Path) -> Result<(), Error> { ++ match val.to_str() { ++ Some(s) => self.setopt_str(opt, &try!(CString::new(s))), ++ None => Err(Error::new(curl_sys::CURLE_CONV_FAILED)), ++ } ++ } ++ ++ fn setopt_long(&mut self, ++ opt: curl_sys::CURLoption, ++ val: c_long) -> Result<(), Error> { ++ unsafe { ++ self.cvt(curl_sys::curl_easy_setopt(self.inner.handle, opt, val)) ++ } ++ } ++ ++ fn setopt_str(&mut self, ++ opt: curl_sys::CURLoption, ++ val: &CStr) -> Result<(), Error> { ++ self.setopt_ptr(opt, val.as_ptr()) ++ } ++ ++ fn setopt_ptr(&self, ++ opt: curl_sys::CURLoption, ++ val: *const c_char) -> Result<(), Error> { ++ unsafe { ++ self.cvt(curl_sys::curl_easy_setopt(self.inner.handle, opt, val)) ++ } ++ } ++ ++ fn setopt_off_t(&mut self, ++ opt: curl_sys::CURLoption, ++ val: curl_sys::curl_off_t) -> Result<(), Error> { ++ unsafe { ++ let rc = curl_sys::curl_easy_setopt(self.inner.handle, opt, val); ++ self.cvt(rc) ++ } ++ } ++ ++ fn getopt_bytes(&mut self, opt: curl_sys::CURLINFO) ++ -> Result, Error> { ++ unsafe { ++ let p = try!(self.getopt_ptr(opt)); ++ if p.is_null() { ++ Ok(None) ++ } else { ++ Ok(Some(CStr::from_ptr(p).to_bytes())) ++ } ++ } ++ } ++ ++ fn getopt_ptr(&mut self, 
opt: curl_sys::CURLINFO) ++ -> Result<*const c_char, Error> { ++ unsafe { ++ let mut p = 0 as *const c_char; ++ let rc = curl_sys::curl_easy_getinfo(self.inner.handle, opt, &mut p); ++ try!(self.cvt(rc)); ++ Ok(p) ++ } ++ } ++ ++ fn getopt_str(&mut self, opt: curl_sys::CURLINFO) ++ -> Result, Error> { ++ match self.getopt_bytes(opt) { ++ Ok(None) => Ok(None), ++ Err(e) => Err(e), ++ Ok(Some(bytes)) => { ++ match str::from_utf8(bytes) { ++ Ok(s) => Ok(Some(s)), ++ Err(_) => Err(Error::new(curl_sys::CURLE_CONV_FAILED)), ++ } ++ } ++ } ++ } ++ ++ fn getopt_long(&mut self, opt: curl_sys::CURLINFO) -> Result { ++ unsafe { ++ let mut p = 0; ++ let rc = curl_sys::curl_easy_getinfo(self.inner.handle, opt, &mut p); ++ try!(self.cvt(rc)); ++ Ok(p) ++ } ++ } ++ ++ fn getopt_double(&mut self, opt: curl_sys::CURLINFO) -> Result { ++ unsafe { ++ let mut p = 0 as c_double; ++ let rc = curl_sys::curl_easy_getinfo(self.inner.handle, opt, &mut p); ++ try!(self.cvt(rc)); ++ Ok(p) ++ } ++ } ++ ++ fn cvt(&self, rc: curl_sys::CURLcode) -> Result<(), Error> { ++ if rc == curl_sys::CURLE_OK { ++ return Ok(()) ++ } ++ let mut buf = self.inner.error_buf.borrow_mut(); ++ if buf[0] == 0 { ++ return Err(Error::new(rc)) ++ } ++ let pos = buf.iter().position(|i| *i == 0).unwrap_or(buf.len()); ++ let msg = String::from_utf8_lossy(&buf[..pos]).into_owned(); ++ buf[0] = 0; ++ Err(::error::error_with_extra(rc, msg.into_boxed_str())) ++ } ++} ++ ++impl fmt::Debug for Easy2 { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_struct("Easy") ++ .field("handle", &self.inner.handle) ++ .field("handler", &self.inner.handle) ++ .finish() ++ } ++} ++ ++impl Drop for Easy2 { ++ fn drop(&mut self) { ++ unsafe { ++ curl_sys::curl_easy_cleanup(self.inner.handle); ++ } ++ } ++} ++ ++extern fn header_cb(buffer: *mut c_char, ++ size: size_t, ++ nitems: size_t, ++ userptr: *mut c_void) -> size_t { ++ let keep_going = panic::catch(|| unsafe { ++ let data = slice::from_raw_parts(buffer as *const u8, 
++ size * nitems); ++ (*(userptr as *mut Inner)).handler.header(data) ++ }).unwrap_or(false); ++ if keep_going { ++ size * nitems ++ } else { ++ !0 ++ } ++} ++ ++extern fn write_cb(ptr: *mut c_char, ++ size: size_t, ++ nmemb: size_t, ++ data: *mut c_void) -> size_t { ++ panic::catch(|| unsafe { ++ let input = slice::from_raw_parts(ptr as *const u8, ++ size * nmemb); ++ match (*(data as *mut Inner)).handler.write(input) { ++ Ok(s) => s, ++ Err(WriteError::Pause) | ++ Err(WriteError::__Nonexhaustive) => curl_sys::CURL_WRITEFUNC_PAUSE, ++ } ++ }).unwrap_or(!0) ++} ++ ++extern fn read_cb(ptr: *mut c_char, ++ size: size_t, ++ nmemb: size_t, ++ data: *mut c_void) -> size_t { ++ panic::catch(|| unsafe { ++ let input = slice::from_raw_parts_mut(ptr as *mut u8, ++ size * nmemb); ++ match (*(data as *mut Inner)).handler.read(input) { ++ Ok(s) => s, ++ Err(ReadError::Pause) => { ++ curl_sys::CURL_READFUNC_PAUSE ++ } ++ Err(ReadError::__Nonexhaustive) | ++ Err(ReadError::Abort) => { ++ curl_sys::CURL_READFUNC_ABORT ++ } ++ } ++ }).unwrap_or(!0) ++} ++ ++extern fn seek_cb(data: *mut c_void, ++ offset: curl_sys::curl_off_t, ++ origin: c_int) -> c_int { ++ panic::catch(|| unsafe { ++ let from = if origin == libc::SEEK_SET { ++ SeekFrom::Start(offset as u64) ++ } else { ++ panic!("unknown origin from libcurl: {}", origin); ++ }; ++ (*(data as *mut Inner)).handler.seek(from) as c_int ++ }).unwrap_or(!0) ++} ++ ++extern fn progress_cb(data: *mut c_void, ++ dltotal: c_double, ++ dlnow: c_double, ++ ultotal: c_double, ++ ulnow: c_double) -> c_int { ++ let keep_going = panic::catch(|| unsafe { ++ (*(data as *mut Inner)).handler.progress(dltotal, dlnow, ultotal, ulnow) ++ }).unwrap_or(false); ++ if keep_going { ++ 0 ++ } else { ++ 1 ++ } ++} ++ ++// TODO: expose `handle`? is that safe? 
++extern fn debug_cb(_handle: *mut curl_sys::CURL, ++ kind: curl_sys::curl_infotype, ++ data: *mut c_char, ++ size: size_t, ++ userptr: *mut c_void) -> c_int { ++ panic::catch(|| unsafe { ++ let data = slice::from_raw_parts(data as *const u8, size); ++ let kind = match kind { ++ curl_sys::CURLINFO_TEXT => InfoType::Text, ++ curl_sys::CURLINFO_HEADER_IN => InfoType::HeaderIn, ++ curl_sys::CURLINFO_HEADER_OUT => InfoType::HeaderOut, ++ curl_sys::CURLINFO_DATA_IN => InfoType::DataIn, ++ curl_sys::CURLINFO_DATA_OUT => InfoType::DataOut, ++ curl_sys::CURLINFO_SSL_DATA_IN => InfoType::SslDataIn, ++ curl_sys::CURLINFO_SSL_DATA_OUT => InfoType::SslDataOut, ++ _ => return, ++ }; ++ (*(userptr as *mut Inner)).handler.debug(kind, data) ++ }); ++ return 0 ++} ++ ++extern fn ssl_ctx_cb(_handle: *mut curl_sys::CURL, ++ ssl_ctx: *mut c_void, ++ data: *mut c_void) -> curl_sys::CURLcode { ++ let res = panic::catch(|| unsafe { ++ match (*(data as *mut Inner)).handler.ssl_ctx(ssl_ctx) { ++ Ok(()) => curl_sys::CURLE_OK, ++ Err(e) => e.code(), ++ } ++ }); ++ // Default to a generic SSL error in case of panic. This ++ // shouldn't really matter since the error should be ++ // propagated later on but better safe than sorry... 
++ res.unwrap_or(curl_sys::CURLE_SSL_CONNECT_ERROR) ++} ++ ++// TODO: expose `purpose` and `sockaddr` inside of `address` ++extern fn opensocket_cb(data: *mut c_void, ++ _purpose: curl_sys::curlsocktype, ++ address: *mut curl_sys::curl_sockaddr) ++ -> curl_sys::curl_socket_t ++{ ++ let res = panic::catch(|| unsafe { ++ (*(data as *mut Inner)).handler.open_socket((*address).family, ++ (*address).socktype, ++ (*address).protocol) ++ .unwrap_or(curl_sys::CURL_SOCKET_BAD) ++ }); ++ res.unwrap_or(curl_sys::CURL_SOCKET_BAD) ++} ++ ++fn double_seconds_to_duration(seconds: f64) -> Duration { ++ let whole_seconds = seconds.trunc() as u64; ++ let nanos = seconds.fract() * 1_000_000_000f64; ++ Duration::new(whole_seconds, nanos as u32) ++} ++ ++#[test] ++fn double_seconds_to_duration_whole_second() { ++ let dur = double_seconds_to_duration(1.0); ++ assert_eq!(dur.as_secs(), 1); ++ assert_eq!(dur.subsec_nanos(), 0); ++} ++ ++#[test] ++fn double_seconds_to_duration_sub_second1() { ++ let dur = double_seconds_to_duration(0.0); ++ assert_eq!(dur.as_secs(), 0); ++ assert_eq!(dur.subsec_nanos(), 0); ++} ++ ++#[test] ++fn double_seconds_to_duration_sub_second2() { ++ let dur = double_seconds_to_duration(0.5); ++ assert_eq!(dur.as_secs(), 0); ++ assert_eq!(dur.subsec_nanos(), 500_000_000); ++} ++ ++impl Auth { ++ /// Creates a new set of authentications with no members. ++ /// ++ /// An `Auth` structure is used to configure which forms of authentication ++ /// are attempted when negotiating connections with servers. ++ pub fn new() -> Auth { ++ Auth { bits: 0 } ++ } ++ ++ /// HTTP Basic authentication. ++ /// ++ /// This is the default choice, and the only method that is in wide-spread ++ /// use and supported virtually everywhere. This sends the user name and ++ /// password over the network in plain text, easily captured by others. ++ pub fn basic(&mut self, on: bool) -> &mut Auth { ++ self.flag(curl_sys::CURLAUTH_BASIC, on) ++ } ++ ++ /// HTTP Digest authentication. 
++ /// ++ /// Digest authentication is defined in RFC 2617 and is a more secure way to ++ /// do authentication over public networks than the regular old-fashioned ++ /// Basic method. ++ pub fn digest(&mut self, on: bool) -> &mut Auth { ++ self.flag(curl_sys::CURLAUTH_DIGEST, on) ++ } ++ ++ /// HTTP Digest authentication with an IE flavor. ++ /// ++ /// Digest authentication is defined in RFC 2617 and is a more secure way to ++ /// do authentication over public networks than the regular old-fashioned ++ /// Basic method. The IE flavor is simply that libcurl will use a special ++ /// "quirk" that IE is known to have used before version 7 and that some ++ /// servers require the client to use. ++ pub fn digest_ie(&mut self, on: bool) -> &mut Auth { ++ self.flag(curl_sys::CURLAUTH_DIGEST_IE, on) ++ } ++ ++ /// HTTP Negotiate (SPNEGO) authentication. ++ /// ++ /// Negotiate authentication is defined in RFC 4559 and is the most secure ++ /// way to perform authentication over HTTP. ++ /// ++ /// You need to build libcurl with a suitable GSS-API library or SSPI on ++ /// Windows for this to work. ++ pub fn gssnegotiate(&mut self, on: bool) -> &mut Auth { ++ self.flag(curl_sys::CURLAUTH_GSSNEGOTIATE, on) ++ } ++ ++ /// HTTP NTLM authentication. ++ /// ++ /// A proprietary protocol invented and used by Microsoft. It uses a ++ /// challenge-response and hash concept similar to Digest, to prevent the ++ /// password from being eavesdropped. ++ /// ++ /// You need to build libcurl with either OpenSSL, GnuTLS or NSS support for ++ /// this option to work, or build libcurl on Windows with SSPI support. ++ pub fn ntlm(&mut self, on: bool) -> &mut Auth { ++ self.flag(curl_sys::CURLAUTH_NTLM, on) ++ } ++ ++ /// NTLM delegating to winbind helper. ++ /// ++ /// Authentication is performed by a separate binary application that is ++ /// executed when needed. 
The name of the application is specified at ++ /// compile time but is typically /usr/bin/ntlm_auth ++ /// ++ /// Note that libcurl will fork when necessary to run the winbind ++ /// application and kill it when complete, calling waitpid() to await its ++ /// exit when done. On POSIX operating systems, killing the process will ++ /// cause a SIGCHLD signal to be raised (regardless of whether ++ /// CURLOPT_NOSIGNAL is set), which must be handled intelligently by the ++ /// application. In particular, the application must not unconditionally ++ /// call wait() in its SIGCHLD signal handler to avoid being subject to a ++ /// race condition. This behavior is subject to change in future versions of ++ /// libcurl. ++ /// ++ /// A proprietary protocol invented and used by Microsoft. It uses a ++ /// challenge-response and hash concept similar to Digest, to prevent the ++ /// password from being eavesdropped. ++ pub fn ntlm_wb(&mut self, on: bool) -> &mut Auth { ++ self.flag(curl_sys::CURLAUTH_NTLM_WB, on) ++ } ++ ++ fn flag(&mut self, bit: c_ulong, on: bool) -> &mut Auth { ++ if on { ++ self.bits |= bit as c_long; ++ } else { ++ self.bits &= !bit as c_long; ++ } ++ self ++ } ++} ++ ++impl fmt::Debug for Auth { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ let bits = self.bits as c_ulong; ++ f.debug_struct("Auth") ++ .field("basic", &(bits & curl_sys::CURLAUTH_BASIC != 0)) ++ .field("digest", &(bits & curl_sys::CURLAUTH_DIGEST != 0)) ++ .field("digest_ie", &(bits & curl_sys::CURLAUTH_DIGEST_IE != 0)) ++ .field("gssnegotiate", &(bits & curl_sys::CURLAUTH_GSSNEGOTIATE != 0)) ++ .field("ntlm", &(bits & curl_sys::CURLAUTH_NTLM != 0)) ++ .field("ntlm_wb", &(bits & curl_sys::CURLAUTH_NTLM_WB != 0)) ++ .finish() ++ } ++} ++ ++impl SslOpt { ++ /// Creates a new set of SSL options. ++ pub fn new() -> SslOpt { ++ SslOpt { bits: 0 } ++ } ++ ++ /// Tells libcurl to disable certificate revocation checks for those SSL ++ /// backends where such behavior is present. 
++ /// ++ /// Currently this option is only supported for WinSSL (the native Windows ++ /// SSL library), with an exception in the case of Windows' Untrusted ++ /// Publishers blacklist which it seems can't be bypassed. This option may ++ /// have broader support to accommodate other SSL backends in the future. ++ /// https://curl.haxx.se/docs/ssl-compared.html ++ pub fn no_revoke(&mut self, on: bool) -> &mut SslOpt { ++ self.flag(curl_sys::CURLSSLOPT_NO_REVOKE, on) ++ } ++ ++ /// Tells libcurl to not attempt to use any workarounds for a security flaw ++ /// in the SSL3 and TLS1.0 protocols. ++ /// ++ /// If this option isn't used or this bit is set to 0, the SSL layer libcurl ++ /// uses may use a work-around for this flaw although it might cause ++ /// interoperability problems with some (older) SSL implementations. ++ /// ++ /// > WARNING: avoiding this work-around lessens the security, and by ++ /// > setting this option to 1 you ask for exactly that. This option is only ++ /// > supported for DarwinSSL, NSS and OpenSSL. 
++ pub fn allow_beast(&mut self, on: bool) -> &mut SslOpt { ++ self.flag(curl_sys::CURLSSLOPT_ALLOW_BEAST, on) ++ } ++ ++ fn flag(&mut self, bit: c_long, on: bool) -> &mut SslOpt { ++ if on { ++ self.bits |= bit as c_long; ++ } else { ++ self.bits &= !bit as c_long; ++ } ++ self ++ } ++} ++ ++impl fmt::Debug for SslOpt { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_struct("SslOpt") ++ .field("no_revoke", &(self.bits & curl_sys::CURLSSLOPT_NO_REVOKE != 0)) ++ .field("allow_beast", &(self.bits & curl_sys::CURLSSLOPT_ALLOW_BEAST != 0)) ++ .finish() ++ } ++} diff --cc vendor/curl-0.4.8/src/easy/list.rs index 000000000,000000000..732d00639 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/src/easy/list.rs @@@ -1,0 -1,0 +1,99 @@@ ++use std::ffi::{CStr, CString}; ++use std::fmt; ++ ++use curl_sys; ++use Error; ++ ++/// A linked list of a strings ++pub struct List { ++ raw: *mut curl_sys::curl_slist, ++} ++ ++/// An iterator over `List` ++#[derive(Clone)] ++pub struct Iter<'a> { ++ _me: &'a List, ++ cur: *mut curl_sys::curl_slist, ++} ++ ++pub fn raw(list: &List) -> *mut curl_sys::curl_slist { ++ list.raw ++} ++ ++pub unsafe fn from_raw(raw: *mut curl_sys::curl_slist) -> List { ++ List { raw: raw } ++} ++ ++unsafe impl Send for List {} ++ ++impl List { ++ /// Creates a new empty list of strings. ++ pub fn new() -> List { ++ List { raw: 0 as *mut _ } ++ } ++ ++ /// Appends some data into this list. ++ pub fn append(&mut self, data: &str) -> Result<(), Error> { ++ let data = try!(CString::new(data)); ++ unsafe { ++ let raw = curl_sys::curl_slist_append(self.raw, data.as_ptr()); ++ assert!(!raw.is_null()); ++ self.raw = raw; ++ Ok(()) ++ } ++ } ++ ++ /// Returns an iterator over the nodes in this list. 
++ pub fn iter(&self) -> Iter { ++ Iter { _me: self, cur: self.raw } ++ } ++} ++ ++impl fmt::Debug for List { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_list() ++ .entries(self.iter().map(String::from_utf8_lossy)) ++ .finish() ++ } ++} ++ ++impl<'a> IntoIterator for &'a List { ++ type IntoIter = Iter<'a>; ++ type Item = &'a [u8]; ++ ++ fn into_iter(self) -> Iter<'a> { ++ self.iter() ++ } ++} ++ ++impl Drop for List { ++ fn drop(&mut self) { ++ unsafe { ++ curl_sys::curl_slist_free_all(self.raw) ++ } ++ } ++} ++ ++impl<'a> Iterator for Iter<'a> { ++ type Item = &'a [u8]; ++ ++ fn next(&mut self) -> Option<&'a [u8]> { ++ if self.cur.is_null() { ++ return None ++ } ++ ++ unsafe { ++ let ret = Some(CStr::from_ptr((*self.cur).data).to_bytes()); ++ self.cur = (*self.cur).next; ++ return ret ++ } ++ } ++} ++ ++impl<'a> fmt::Debug for Iter<'a> { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_list() ++ .entries(self.clone().map(String::from_utf8_lossy)) ++ .finish() ++ } ++} diff --cc vendor/curl-0.4.8/src/easy/mod.rs index 000000000,000000000..a13df134e new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/src/easy/mod.rs @@@ -1,0 -1,0 +1,21 @@@ ++//! Bindings to the "easy" libcurl API. ++//! ++//! This module contains some simple types like `Easy` and `List` which are just ++//! wrappers around the corresponding libcurl types. There's also a few enums ++//! scattered about for various options here and there. ++//! ++//! Most simple usage of libcurl will likely use the `Easy` structure here, and ++//! you can find more docs about its usage on that struct. 
++ ++mod list; ++mod form; ++mod handle; ++mod handler; ++ ++pub use self::list::{List, Iter}; ++pub use self::form::{Form, Part}; ++pub use self::handle::{Easy, Transfer}; ++pub use self::handler::{Easy2, Handler}; ++pub use self::handler::{InfoType, SeekResult, ReadError, WriteError}; ++pub use self::handler::{TimeCondition, IpResolve, HttpVersion, SslVersion}; ++pub use self::handler::{SslOpt, NetRc, Auth, ProxyType}; diff --cc vendor/curl-0.4.8/src/error.rs index 000000000,000000000..8dacc2b49 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/src/error.rs @@@ -1,0 -1,0 +1,598 @@@ ++use std::error; ++use std::ffi::{self, CStr}; ++use std::fmt; ++use std::str; ++use std::io; ++ ++use curl_sys; ++ ++/// An error returned from various "easy" operations. ++/// ++/// This structure wraps a `CURLcode`. ++#[derive(Clone, PartialEq)] ++pub struct Error { ++ code: curl_sys::CURLcode, ++ extra: Option>, ++} ++ ++pub fn error_with_extra(code: curl_sys::CURLcode, extra: Box) -> Error { ++ Error { ++ code: code, ++ extra: Some(extra), ++ } ++} ++ ++impl Error { ++ /// Creates a new error from the underlying code returned by libcurl. ++ pub fn new(code: curl_sys::CURLcode) -> Error { ++ Error { ++ code: code, ++ extra: None, ++ } ++ } ++ ++ /// Returns whether this error corresponds to CURLE_UNSUPPORTED_PROTOCOL. ++ pub fn is_unsupported_protocol(&self) -> bool { ++ self.code == curl_sys::CURLE_UNSUPPORTED_PROTOCOL ++ } ++ ++ /// Returns whether this error corresponds to CURLE_FAILED_INIT. ++ pub fn is_failed_init(&self) -> bool { ++ self.code == curl_sys::CURLE_FAILED_INIT ++ } ++ ++ /// Returns whether this error corresponds to CURLE_URL_MALFORMAT. ++ pub fn is_url_malformed(&self) -> bool { ++ self.code == curl_sys::CURLE_URL_MALFORMAT ++ } ++ ++ // /// Returns whether this error corresponds to CURLE_NOT_BUILT_IN. 
++ // pub fn is_not_built_in(&self) -> bool { ++ // self.code == curl_sys::CURLE_NOT_BUILT_IN ++ // } ++ ++ /// Returns whether this error corresponds to CURLE_COULDNT_RESOLVE_PROXY. ++ pub fn is_couldnt_resolve_proxy(&self) -> bool { ++ self.code == curl_sys::CURLE_COULDNT_RESOLVE_PROXY ++ } ++ ++ /// Returns whether this error corresponds to CURLE_COULDNT_RESOLVE_HOST. ++ pub fn is_couldnt_resolve_host(&self) -> bool { ++ self.code == curl_sys::CURLE_COULDNT_RESOLVE_HOST ++ } ++ ++ /// Returns whether this error corresponds to CURLE_COULDNT_CONNECT. ++ pub fn is_couldnt_connect(&self) -> bool { ++ self.code == curl_sys::CURLE_COULDNT_CONNECT ++ } ++ ++ /// Returns whether this error corresponds to CURLE_REMOTE_ACCESS_DENIED. ++ pub fn is_remote_access_denied(&self) -> bool { ++ self.code == curl_sys::CURLE_REMOTE_ACCESS_DENIED ++ } ++ ++ /// Returns whether this error corresponds to CURLE_PARTIAL_FILE. ++ pub fn is_partial_file(&self) -> bool { ++ self.code == curl_sys::CURLE_PARTIAL_FILE ++ } ++ ++ /// Returns whether this error corresponds to CURLE_QUOTE_ERROR. ++ pub fn is_quote_error(&self) -> bool { ++ self.code == curl_sys::CURLE_QUOTE_ERROR ++ } ++ ++ /// Returns whether this error corresponds to CURLE_HTTP_RETURNED_ERROR. ++ pub fn is_http_returned_error(&self) -> bool { ++ self.code == curl_sys::CURLE_HTTP_RETURNED_ERROR ++ } ++ ++ /// Returns whether this error corresponds to CURLE_READ_ERROR. ++ pub fn is_read_error(&self) -> bool { ++ self.code == curl_sys::CURLE_READ_ERROR ++ } ++ ++ /// Returns whether this error corresponds to CURLE_WRITE_ERROR. ++ pub fn is_write_error(&self) -> bool { ++ self.code == curl_sys::CURLE_WRITE_ERROR ++ } ++ ++ /// Returns whether this error corresponds to CURLE_UPLOAD_FAILED. ++ pub fn is_upload_failed(&self) -> bool { ++ self.code == curl_sys::CURLE_UPLOAD_FAILED ++ } ++ ++ /// Returns whether this error corresponds to CURLE_OUT_OF_MEMORY. 
++ pub fn is_out_of_memory(&self) -> bool { ++ self.code == curl_sys::CURLE_OUT_OF_MEMORY ++ } ++ ++ /// Returns whether this error corresponds to CURLE_OPERATION_TIMEDOUT. ++ pub fn is_operation_timedout(&self) -> bool { ++ self.code == curl_sys::CURLE_OPERATION_TIMEDOUT ++ } ++ ++ /// Returns whether this error corresponds to CURLE_RANGE_ERROR. ++ pub fn is_range_error(&self) -> bool { ++ self.code == curl_sys::CURLE_RANGE_ERROR ++ } ++ ++ /// Returns whether this error corresponds to CURLE_HTTP_POST_ERROR. ++ pub fn is_http_post_error(&self) -> bool { ++ self.code == curl_sys::CURLE_HTTP_POST_ERROR ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SSL_CONNECT_ERROR. ++ pub fn is_ssl_connect_error(&self) -> bool { ++ self.code == curl_sys::CURLE_SSL_CONNECT_ERROR ++ } ++ ++ /// Returns whether this error corresponds to CURLE_BAD_DOWNLOAD_RESUME. ++ pub fn is_bad_download_resume(&self) -> bool { ++ self.code == curl_sys::CURLE_BAD_DOWNLOAD_RESUME ++ } ++ ++ /// Returns whether this error corresponds to CURLE_FILE_COULDNT_READ_FILE. ++ pub fn is_file_couldnt_read_file(&self) -> bool { ++ self.code == curl_sys::CURLE_FILE_COULDNT_READ_FILE ++ } ++ ++ /// Returns whether this error corresponds to CURLE_FUNCTION_NOT_FOUND. ++ pub fn is_function_not_found(&self) -> bool { ++ self.code == curl_sys::CURLE_FUNCTION_NOT_FOUND ++ } ++ ++ /// Returns whether this error corresponds to CURLE_ABORTED_BY_CALLBACK. ++ pub fn is_aborted_by_callback(&self) -> bool { ++ self.code == curl_sys::CURLE_ABORTED_BY_CALLBACK ++ } ++ ++ /// Returns whether this error corresponds to CURLE_BAD_FUNCTION_ARGUMENT. ++ pub fn is_bad_function_argument(&self) -> bool { ++ self.code == curl_sys::CURLE_BAD_FUNCTION_ARGUMENT ++ } ++ ++ /// Returns whether this error corresponds to CURLE_INTERFACE_FAILED. ++ pub fn is_interface_failed(&self) -> bool { ++ self.code == curl_sys::CURLE_INTERFACE_FAILED ++ } ++ ++ /// Returns whether this error corresponds to CURLE_TOO_MANY_REDIRECTS. 
++ pub fn is_too_many_redirects(&self) -> bool { ++ self.code == curl_sys::CURLE_TOO_MANY_REDIRECTS ++ } ++ ++ /// Returns whether this error corresponds to CURLE_UNKNOWN_OPTION. ++ pub fn is_unknown_option(&self) -> bool { ++ self.code == curl_sys::CURLE_UNKNOWN_OPTION ++ } ++ ++ /// Returns whether this error corresponds to CURLE_PEER_FAILED_VERIFICATION. ++ pub fn is_peer_failed_verification(&self) -> bool { ++ self.code == curl_sys::CURLE_PEER_FAILED_VERIFICATION ++ } ++ ++ /// Returns whether this error corresponds to CURLE_GOT_NOTHING. ++ pub fn is_got_nothing(&self) -> bool { ++ self.code == curl_sys::CURLE_GOT_NOTHING ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SSL_ENGINE_NOTFOUND. ++ pub fn is_ssl_engine_notfound(&self) -> bool { ++ self.code == curl_sys::CURLE_SSL_ENGINE_NOTFOUND ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SSL_ENGINE_SETFAILED. ++ pub fn is_ssl_engine_setfailed(&self) -> bool { ++ self.code == curl_sys::CURLE_SSL_ENGINE_SETFAILED ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SEND_ERROR. ++ pub fn is_send_error(&self) -> bool { ++ self.code == curl_sys::CURLE_SEND_ERROR ++ } ++ ++ /// Returns whether this error corresponds to CURLE_RECV_ERROR. ++ pub fn is_recv_error(&self) -> bool { ++ self.code == curl_sys::CURLE_RECV_ERROR ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SSL_CERTPROBLEM. ++ pub fn is_ssl_certproblem(&self) -> bool { ++ self.code == curl_sys::CURLE_SSL_CERTPROBLEM ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SSL_CIPHER. ++ pub fn is_ssl_cipher(&self) -> bool { ++ self.code == curl_sys::CURLE_SSL_CIPHER ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SSL_CACERT. ++ pub fn is_ssl_cacert(&self) -> bool { ++ self.code == curl_sys::CURLE_SSL_CACERT ++ } ++ ++ /// Returns whether this error corresponds to CURLE_BAD_CONTENT_ENCODING. 
++ pub fn is_bad_content_encoding(&self) -> bool { ++ self.code == curl_sys::CURLE_BAD_CONTENT_ENCODING ++ } ++ ++ /// Returns whether this error corresponds to CURLE_FILESIZE_EXCEEDED. ++ pub fn is_filesize_exceeded(&self) -> bool { ++ self.code == curl_sys::CURLE_FILESIZE_EXCEEDED ++ } ++ ++ /// Returns whether this error corresponds to CURLE_USE_SSL_FAILED. ++ pub fn is_use_ssl_failed(&self) -> bool { ++ self.code == curl_sys::CURLE_USE_SSL_FAILED ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SEND_FAIL_REWIND. ++ pub fn is_send_fail_rewind(&self) -> bool { ++ self.code == curl_sys::CURLE_SEND_FAIL_REWIND ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SSL_ENGINE_INITFAILED. ++ pub fn is_ssl_engine_initfailed(&self) -> bool { ++ self.code == curl_sys::CURLE_SSL_ENGINE_INITFAILED ++ } ++ ++ /// Returns whether this error corresponds to CURLE_LOGIN_DENIED. ++ pub fn is_login_denied(&self) -> bool { ++ self.code == curl_sys::CURLE_LOGIN_DENIED ++ } ++ ++ /// Returns whether this error corresponds to CURLE_CONV_FAILED. ++ pub fn is_conv_failed(&self) -> bool { ++ self.code == curl_sys::CURLE_CONV_FAILED ++ } ++ ++ /// Returns whether this error corresponds to CURLE_CONV_REQD. ++ pub fn is_conv_required(&self) -> bool { ++ self.code == curl_sys::CURLE_CONV_REQD ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SSL_CACERT_BADFILE. ++ pub fn is_ssl_cacert_badfile(&self) -> bool { ++ self.code == curl_sys::CURLE_SSL_CACERT_BADFILE ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SSL_CRL_BADFILE. ++ pub fn is_ssl_crl_badfile(&self) -> bool { ++ self.code == curl_sys::CURLE_SSL_CRL_BADFILE ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SSL_SHUTDOWN_FAILED. ++ pub fn is_ssl_shutdown_failed(&self) -> bool { ++ self.code == curl_sys::CURLE_SSL_SHUTDOWN_FAILED ++ } ++ ++ /// Returns whether this error corresponds to CURLE_AGAIN. 
++ pub fn is_again(&self) -> bool { ++ self.code == curl_sys::CURLE_AGAIN ++ } ++ ++ /// Returns whether this error corresponds to CURLE_SSL_ISSUER_ERROR. ++ pub fn is_ssl_issuer_error(&self) -> bool { ++ self.code == curl_sys::CURLE_SSL_ISSUER_ERROR ++ } ++ ++ /// Returns whether this error corresponds to CURLE_CHUNK_FAILED. ++ pub fn is_chunk_failed(&self) -> bool { ++ self.code == curl_sys::CURLE_CHUNK_FAILED ++ } ++ ++ // /// Returns whether this error corresponds to CURLE_NO_CONNECTION_AVAILABLE. ++ // pub fn is_no_connection_available(&self) -> bool { ++ // self.code == curl_sys::CURLE_NO_CONNECTION_AVAILABLE ++ // } ++ ++ /// Returns the value of the underlying error corresponding to libcurl. ++ pub fn code(&self) -> curl_sys::CURLcode { ++ self.code ++ } ++ ++ /// Returns the extra description of this error, if any is available. ++ pub fn extra_description(&self) -> Option<&str> { ++ self.extra.as_ref().map(|s| &**s) ++ } ++} ++ ++impl fmt::Display for Error { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ let desc = error::Error::description(self); ++ match self.extra { ++ Some(ref s) => write!(f, "[{}] {} ({})", self.code(), desc, s), ++ None => write!(f, "[{}] {}", self.code(), desc), ++ } ++ } ++} ++ ++impl fmt::Debug for Error { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_struct("Error") ++ .field("description", &error::Error::description(self)) ++ .field("code", &self.code) ++ .field("extra", &self.extra) ++ .finish() ++ } ++} ++ ++impl error::Error for Error { ++ fn description(&self) -> &str { ++ unsafe { ++ let s = curl_sys::curl_easy_strerror(self.code); ++ assert!(!s.is_null()); ++ str::from_utf8(CStr::from_ptr(s).to_bytes()).unwrap() ++ } ++ } ++} ++ ++/// An error returned from "share" operations. ++/// ++/// This structure wraps a `CURLSHcode`. 
++#[derive(Clone, PartialEq)] ++pub struct ShareError { ++ code: curl_sys::CURLSHcode, ++} ++ ++impl ShareError { ++ /// Creates a new error from the underlying code returned by libcurl. ++ pub fn new(code: curl_sys::CURLSHcode) -> ShareError { ++ ShareError { code: code } ++ } ++ ++ /// Returns whether this error corresponds to CURLSHE_BAD_OPTION. ++ pub fn is_bad_option(&self) -> bool { ++ self.code == curl_sys::CURLSHE_BAD_OPTION ++ } ++ ++ /// Returns whether this error corresponds to CURLSHE_IN_USE. ++ pub fn is_in_use(&self) -> bool { ++ self.code == curl_sys::CURLSHE_IN_USE ++ } ++ ++ /// Returns whether this error corresponds to CURLSHE_INVALID. ++ pub fn is_invalid(&self) -> bool { ++ self.code == curl_sys::CURLSHE_INVALID ++ } ++ ++ /// Returns whether this error corresponds to CURLSHE_NOMEM. ++ pub fn is_nomem(&self) -> bool { ++ self.code == curl_sys::CURLSHE_NOMEM ++ } ++ ++ // /// Returns whether this error corresponds to CURLSHE_NOT_BUILT_IN. ++ // pub fn is_not_built_in(&self) -> bool { ++ // self.code == curl_sys::CURLSHE_NOT_BUILT_IN ++ // } ++ ++ /// Returns the value of the underlying error corresponding to libcurl. ++ pub fn code(&self) -> curl_sys::CURLSHcode { ++ self.code ++ } ++} ++ ++impl fmt::Display for ShareError { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ error::Error::description(self).fmt(f) ++ } ++} ++ ++impl fmt::Debug for ShareError { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ write!(f, "ShareError {{ description: {:?}, code: {} }}", ++ error::Error::description(self), ++ self.code) ++ } ++} ++ ++impl error::Error for ShareError { ++ fn description(&self) -> &str { ++ unsafe { ++ let s = curl_sys::curl_share_strerror(self.code); ++ assert!(!s.is_null()); ++ str::from_utf8(CStr::from_ptr(s).to_bytes()).unwrap() ++ } ++ } ++} ++ ++/// An error from "multi" operations. ++/// ++/// THis structure wraps a `CURLMcode`. 
++#[derive(Clone, PartialEq)] ++pub struct MultiError { ++ code: curl_sys::CURLMcode, ++} ++ ++impl MultiError { ++ /// Creates a new error from the underlying code returned by libcurl. ++ pub fn new(code: curl_sys::CURLMcode) -> MultiError { ++ MultiError { code: code } ++ } ++ ++ /// Returns whether this error corresponds to CURLM_BAD_HANDLE. ++ pub fn is_bad_handle(&self) -> bool { ++ self.code == curl_sys::CURLM_BAD_HANDLE ++ } ++ ++ /// Returns whether this error corresponds to CURLM_BAD_EASY_HANDLE. ++ pub fn is_bad_easy_handle(&self) -> bool { ++ self.code == curl_sys::CURLM_BAD_EASY_HANDLE ++ } ++ ++ /// Returns whether this error corresponds to CURLM_OUT_OF_MEMORY. ++ pub fn is_out_of_memory(&self) -> bool { ++ self.code == curl_sys::CURLM_OUT_OF_MEMORY ++ } ++ ++ /// Returns whether this error corresponds to CURLM_INTERNAL_ERROR. ++ pub fn is_internal_error(&self) -> bool { ++ self.code == curl_sys::CURLM_INTERNAL_ERROR ++ } ++ ++ /// Returns whether this error corresponds to CURLM_BAD_SOCKET. ++ pub fn is_bad_socket(&self) -> bool { ++ self.code == curl_sys::CURLM_BAD_SOCKET ++ } ++ ++ /// Returns whether this error corresponds to CURLM_UNKNOWN_OPTION. ++ pub fn is_unknown_option(&self) -> bool { ++ self.code == curl_sys::CURLM_UNKNOWN_OPTION ++ } ++ ++ /// Returns whether this error corresponds to CURLM_CALL_MULTI_PERFORM. ++ pub fn is_call_perform(&self) -> bool { ++ self.code == curl_sys::CURLM_CALL_MULTI_PERFORM ++ } ++ ++ // /// Returns whether this error corresponds to CURLM_ADDED_ALREADY. ++ // pub fn is_added_already(&self) -> bool { ++ // self.code == curl_sys::CURLM_ADDED_ALREADY ++ // } ++ ++ /// Returns the value of the underlying error corresponding to libcurl. 
++ pub fn code(&self) -> curl_sys::CURLMcode { ++ self.code ++ } ++} ++ ++impl fmt::Display for MultiError { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ error::Error::description(self).fmt(f) ++ } ++} ++ ++impl fmt::Debug for MultiError { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ write!(f, "MultiError {{ description: {:?}, code: {} }}", ++ error::Error::description(self), ++ self.code) ++ } ++} ++ ++impl error::Error for MultiError { ++ fn description(&self) -> &str { ++ unsafe { ++ let s = curl_sys::curl_multi_strerror(self.code); ++ assert!(!s.is_null()); ++ str::from_utf8(CStr::from_ptr(s).to_bytes()).unwrap() ++ } ++ } ++} ++ ++ ++/// An error from "form add" operations. ++/// ++/// THis structure wraps a `CURLFORMcode`. ++#[derive(Clone, PartialEq)] ++pub struct FormError { ++ code: curl_sys::CURLFORMcode, ++} ++ ++impl FormError { ++ /// Creates a new error from the underlying code returned by libcurl. ++ pub fn new(code: curl_sys::CURLFORMcode) -> FormError { ++ FormError { code: code } ++ } ++ ++ /// Returns whether this error corresponds to CURL_FORMADD_MEMORY. ++ pub fn is_memory(&self) -> bool { ++ self.code == curl_sys::CURL_FORMADD_MEMORY ++ } ++ ++ /// Returns whether this error corresponds to CURL_FORMADD_OPTION_TWICE. ++ pub fn is_option_twice(&self) -> bool { ++ self.code == curl_sys::CURL_FORMADD_OPTION_TWICE ++ } ++ ++ /// Returns whether this error corresponds to CURL_FORMADD_NULL. ++ pub fn is_null(&self) -> bool { ++ self.code == curl_sys::CURL_FORMADD_NULL ++ } ++ ++ /// Returns whether this error corresponds to CURL_FORMADD_UNKNOWN_OPTION. ++ pub fn is_unknown_option(&self) -> bool { ++ self.code == curl_sys::CURL_FORMADD_UNKNOWN_OPTION ++ } ++ ++ /// Returns whether this error corresponds to CURL_FORMADD_INCOMPLETE. ++ pub fn is_incomplete(&self) -> bool { ++ self.code == curl_sys::CURL_FORMADD_INCOMPLETE ++ } ++ ++ /// Returns whether this error corresponds to CURL_FORMADD_ILLEGAL_ARRAY. 
++ pub fn is_illegal_array(&self) -> bool { ++ self.code == curl_sys::CURL_FORMADD_ILLEGAL_ARRAY ++ } ++ ++ /// Returns whether this error corresponds to CURL_FORMADD_DISABLED. ++ pub fn is_disabled(&self) -> bool { ++ self.code == curl_sys::CURL_FORMADD_DISABLED ++ } ++ ++ /// Returns the value of the underlying error corresponding to libcurl. ++ pub fn code(&self) -> curl_sys::CURLFORMcode { ++ self.code ++ } ++} ++ ++impl fmt::Display for FormError { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ error::Error::description(self).fmt(f) ++ } ++} ++ ++impl fmt::Debug for FormError { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ write!(f, "FormError {{ description: {:?}, code: {} }}", ++ error::Error::description(self), ++ self.code) ++ } ++} ++ ++impl error::Error for FormError { ++ fn description(&self) -> &str { ++ match self.code { ++ curl_sys::CURL_FORMADD_MEMORY => "allocation failure", ++ curl_sys::CURL_FORMADD_OPTION_TWICE => "one option passed twice", ++ curl_sys::CURL_FORMADD_NULL => "null pointer given for string", ++ curl_sys::CURL_FORMADD_UNKNOWN_OPTION => "unknown option", ++ curl_sys::CURL_FORMADD_INCOMPLETE => "form information not complete", ++ curl_sys::CURL_FORMADD_ILLEGAL_ARRAY => "illegal array in option", ++ curl_sys::CURL_FORMADD_DISABLED => { ++ "libcurl does not have support for this option compiled in" ++ } ++ _ => "unknown form error", ++ } ++ } ++} ++ ++impl From for Error { ++ fn from(_: ffi::NulError) -> Error { ++ Error { code: curl_sys::CURLE_CONV_FAILED, extra: None } ++ } ++} ++ ++impl From for io::Error { ++ fn from(e: Error) -> io::Error { ++ io::Error::new(io::ErrorKind::Other, e) ++ } ++} ++ ++impl From for io::Error { ++ fn from(e: ShareError) -> io::Error { ++ io::Error::new(io::ErrorKind::Other, e) ++ } ++} ++ ++impl From for io::Error { ++ fn from(e: MultiError) -> io::Error { ++ io::Error::new(io::ErrorKind::Other, e) ++ } ++} ++ ++impl From for io::Error { ++ fn from(e: FormError) -> io::Error 
{ ++ io::Error::new(io::ErrorKind::Other, e) ++ } ++} diff --cc vendor/curl-0.4.8/src/lib.rs index 000000000,000000000..1849b93a8 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/src/lib.rs @@@ -1,0 -1,0 +1,123 @@@ ++//! Rust bindings to the libcurl C library ++//! ++//! This crate contains bindings for an HTTP/HTTPS client which is powered by ++//! [libcurl], the same library behind the `curl` command line tool. The API ++//! currently closely matches that of libcurl itself, except that a Rustic layer ++//! of safety is applied on top. ++//! ++//! [libcurl]: https://curl.haxx.se/libcurl/ ++//! ++//! # The "Easy" API ++//! ++//! The easiest way to send a request is to use the `Easy` api which corresponds ++//! to `CURL` in libcurl. This handle supports a wide variety of options and can ++//! be used to make a single blocking request in a thread. Callbacks can be ++//! specified to deal with data as it arrives and a handle can be reused to ++//! cache connections and such. ++//! ++//! ```rust,no_run ++//! use std::io::{stdout, Write}; ++//! ++//! use curl::easy::Easy; ++//! ++//! // Write the contents of rust-lang.org to stdout ++//! let mut easy = Easy::new(); ++//! easy.url("https://www.rust-lang.org/").unwrap(); ++//! easy.write_function(|data| { ++//! Ok(stdout().write(data).unwrap()) ++//! }).unwrap(); ++//! easy.perform().unwrap(); ++//! ``` ++//! ++//! # What about multiple concurrent HTTP requests? ++//! ++//! One option you have currently is to send multiple requests in multiple ++//! threads, but otherwise libcurl has a "multi" interface for doing this ++//! operation. Initial bindings of this interface can be found in the `multi` ++//! module, but feedback is welcome! ++//! ++//! # Where does libcurl come from? ++//! ++//! This crate links to the `curl-sys` crate which is in turn responsible for ++//! acquiring and linking to the libcurl library. Currently this crate will ++//! 
build libcurl from source if one is not already detected on the system. ++//! ++//! There is a large number of releases for libcurl, all with different sets of ++//! capabilities. Robust programs may wish to inspect `Version::get()` to test ++//! what features are implemented in the linked build of libcurl at runtime. ++ ++#![deny(missing_docs, missing_debug_implementations)] ++#![doc(html_root_url = "https://docs.rs/curl/0.4")] ++ ++extern crate curl_sys; ++extern crate libc; ++extern crate socket2; ++ ++#[cfg(all(unix, not(target_os = "macos")))] ++extern crate openssl_sys; ++#[cfg(all(unix, not(target_os = "macos")))] ++extern crate openssl_probe; ++#[cfg(windows)] ++extern crate winapi; ++ ++use std::ffi::CStr; ++use std::str; ++use std::sync::{Once, ONCE_INIT}; ++ ++pub use error::{Error, ShareError, MultiError, FormError}; ++mod error; ++ ++pub use version::{Version, Protocols}; ++mod version; ++ ++mod panic; ++pub mod easy; ++pub mod multi; ++ ++/// Initializes the underlying libcurl library. ++/// ++/// It's not required to call this before the library is used, but it's ++/// recommended to do so as soon as the program starts. ++pub fn init() { ++ static INIT: Once = ONCE_INIT; ++ INIT.call_once(|| { ++ platform_init(); ++ unsafe { ++ assert_eq!(curl_sys::curl_global_init(curl_sys::CURL_GLOBAL_ALL), 0); ++ } ++ ++ // Note that we explicitly don't schedule a call to ++ // `curl_global_cleanup`. The documentation for that function says ++ // ++ // > You must not call it when any other thread in the program (i.e. a ++ // > thread sharing the same memory) is running. This doesn't just mean ++ // > no other thread that is using libcurl. ++ // ++ // We can't ever be sure of that, so unfortunately we can't call the ++ // function. 
++ }); ++ ++ #[cfg(all(unix, not(target_os = "macos")))] ++ fn platform_init() { ++ openssl_sys::init(); ++ } ++ ++ #[cfg(not(all(unix, not(target_os = "macos"))))] ++ fn platform_init() {} ++} ++ ++unsafe fn opt_str<'a>(ptr: *const libc::c_char) -> Option<&'a str> { ++ if ptr.is_null() { ++ None ++ } else { ++ Some(str::from_utf8(CStr::from_ptr(ptr).to_bytes()).unwrap()) ++ } ++} ++ ++fn cvt(r: curl_sys::CURLcode) -> Result<(), Error> { ++ if r == curl_sys::CURLE_OK { ++ Ok(()) ++ } else { ++ Err(Error::new(r)) ++ } ++} diff --cc vendor/curl-0.4.8/src/multi.rs index 000000000,000000000..e721948ab new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/src/multi.rs @@@ -1,0 -1,0 +1,891 @@@ ++//! Multi - initiating multiple requests simultaneously ++ ++use std::fmt; ++use std::marker; ++use std::time::Duration; ++ ++use libc::{c_int, c_char, c_void, c_long, c_short}; ++use curl_sys; ++ ++#[cfg(windows)] ++use winapi::fd_set; ++#[cfg(unix)] ++use libc::{fd_set, pollfd, POLLIN, POLLPRI, POLLOUT}; ++ ++use {MultiError, Error}; ++use easy::{Easy, Easy2}; ++use panic; ++ ++/// A multi handle for initiating multiple connections simultaneously. ++/// ++/// This structure corresponds to `CURLM` in libcurl and provides the ability to ++/// have multiple transfers in flight simultaneously. This handle is then used ++/// to manage each transfer. The main purpose of a `CURLM` is for the ++/// *application* to drive the I/O rather than libcurl itself doing all the ++/// blocking. Methods like `action` allow the application to inform libcurl of ++/// when events have happened. ++/// ++/// Lots more documentation can be found on the libcurl [multi tutorial] where ++/// the APIs correspond pretty closely with this crate. 
++/// ++/// [multi tutorial]: https://curl.haxx.se/libcurl/c/libcurl-multi.html ++pub struct Multi { ++ raw: *mut curl_sys::CURLM, ++ data: Box, ++} ++ ++struct MultiData { ++ socket: Box, ++ timer: Box) -> bool + Send>, ++} ++ ++/// Message from the `messages` function of a multi handle. ++/// ++/// Currently only indicates whether a transfer is done. ++pub struct Message<'multi> { ++ ptr: *mut curl_sys::CURLMsg, ++ _multi: &'multi Multi, ++} ++ ++/// Wrapper around an easy handle while it's owned by a multi handle. ++/// ++/// Once an easy handle has been added to a multi handle then it can no longer ++/// be used via `perform`. This handle is also used to remove the easy handle ++/// from the multi handle when desired. ++pub struct EasyHandle { ++ easy: Easy, ++ // This is now effecitvely bound to a `Multi`, so it is no longer sendable. ++ _marker: marker::PhantomData<&'static Multi>, ++} ++ ++/// Wrapper around an easy handle while it's owned by a multi handle. ++/// ++/// Once an easy handle has been added to a multi handle then it can no longer ++/// be used via `perform`. This handle is also used to remove the easy handle ++/// from the multi handle when desired. ++pub struct Easy2Handle { ++ easy: Easy2, ++ // This is now effecitvely bound to a `Multi`, so it is no longer sendable. ++ _marker: marker::PhantomData<&'static Multi>, ++} ++ ++/// Notification of the events that have happened on a socket. ++/// ++/// This type is passed as an argument to the `action` method on a multi handle ++/// to indicate what events have occurred on a socket. ++pub struct Events { ++ bits: c_int, ++} ++ ++/// Notification of events that are requested on a socket. ++/// ++/// This type is yielded to the `socket_function` callback to indicate what ++/// events are requested on a socket. 
++pub struct SocketEvents { ++ bits: c_int, ++} ++ ++/// Raw underlying socket type that the multi handles use ++pub type Socket = curl_sys::curl_socket_t; ++ ++/// File descriptor to wait on for use with the `wait` method on a multi handle. ++pub struct WaitFd { ++ inner: curl_sys::curl_waitfd, ++} ++ ++impl Multi { ++ /// Creates a new multi session through which multiple HTTP transfers can be ++ /// initiated. ++ pub fn new() -> Multi { ++ unsafe { ++ ::init(); ++ let ptr = curl_sys::curl_multi_init(); ++ assert!(!ptr.is_null()); ++ Multi { ++ raw: ptr, ++ data: Box::new(MultiData { ++ socket: Box::new(|_, _, _| ()), ++ timer: Box::new(|_| true), ++ }), ++ } ++ } ++ } ++ ++ /// Set the callback informed about what to wait for ++ /// ++ /// When the `action` function runs, it informs the application about ++ /// updates in the socket (file descriptor) status by doing none, one, or ++ /// multiple calls to the socket callback. The callback gets status updates ++ /// with changes since the previous time the callback was called. See ++ /// `action` for more details on how the callback is used and should work. ++ /// ++ /// The `SocketEvents` parameter informs the callback on the status of the ++ /// given socket, and the methods on that type can be used to learn about ++ /// what's going on with the socket. ++ /// ++ /// The third `usize` parameter is a custom value set by the `assign` method ++ /// below. 
++ pub fn socket_function(&mut self, f: F) -> Result<(), MultiError> ++ where F: FnMut(Socket, SocketEvents, usize) + Send + 'static, ++ { ++ self._socket_function(Box::new(f)) ++ } ++ ++ fn _socket_function(&mut self, ++ f: Box) ++ -> Result<(), MultiError> ++ { ++ self.data.socket = f; ++ let cb: curl_sys::curl_socket_callback = cb; ++ try!(self.setopt_ptr(curl_sys::CURLMOPT_SOCKETFUNCTION, ++ cb as usize as *const c_char)); ++ let ptr = &*self.data as *const _; ++ try!(self.setopt_ptr(curl_sys::CURLMOPT_SOCKETDATA, ++ ptr as *const c_char)); ++ return Ok(()); ++ ++ // TODO: figure out how to expose `_easy` ++ extern fn cb(_easy: *mut curl_sys::CURL, ++ socket: curl_sys::curl_socket_t, ++ what: c_int, ++ userptr: *mut c_void, ++ socketp: *mut c_void) -> c_int { ++ panic::catch(|| unsafe { ++ let f = &mut (*(userptr as *mut MultiData)).socket; ++ f(socket, SocketEvents { bits: what }, socketp as usize) ++ }); ++ 0 ++ } ++ } ++ ++ /// Set data to associate with an internal socket ++ /// ++ /// This function creates an association in the multi handle between the ++ /// given socket and a private token of the application. This is designed ++ /// for `action` uses. ++ /// ++ /// When set, the token will be passed to all future socket callbacks for ++ /// the specified socket. ++ /// ++ /// If the given socket isn't already in use by libcurl, this function will ++ /// return an error. ++ /// ++ /// libcurl only keeps one single token associated with a socket, so ++ /// calling this function several times for the same socket will make the ++ /// last set token get used. ++ /// ++ /// The idea here being that this association (socket to token) is something ++ /// that just about every application that uses this API will need and then ++ /// libcurl can just as well do it since it already has an internal hash ++ /// table lookup for this. 
++ /// ++ /// # Typical Usage ++ /// ++ /// In a typical application you allocate a struct or at least use some kind ++ /// of semi-dynamic data for each socket that we must wait for action on ++ /// when using the `action` approach. ++ /// ++ /// When our socket-callback gets called by libcurl and we get to know about ++ /// yet another socket to wait for, we can use `assign` to point out the ++ /// particular data so that when we get updates about this same socket ++ /// again, we don't have to find the struct associated with this socket by ++ /// ourselves. ++ pub fn assign(&self, ++ socket: Socket, ++ token: usize) -> Result<(), MultiError> { ++ unsafe { ++ try!(cvt(curl_sys::curl_multi_assign(self.raw, socket, ++ token as *mut _))); ++ Ok(()) ++ } ++ } ++ ++ /// Set callback to receive timeout values ++ /// ++ /// Certain features, such as timeouts and retries, require you to call ++ /// libcurl even when there is no activity on the file descriptors. ++ /// ++ /// Your callback function should install a non-repeating timer with the ++ /// interval specified. Each time that timer fires, call either `action` or ++ /// `perform`, depending on which interface you use. ++ /// ++ /// A timeout value of `None` means you should delete your timer. ++ /// ++ /// A timeout value of 0 means you should call `action` or `perform` (once) ++ /// as soon as possible. ++ /// ++ /// This callback will only be called when the timeout changes. ++ /// ++ /// The timer callback should return `true` on success, and `false` on ++ /// error. This callback can be used instead of, or in addition to, ++ /// `get_timeout`. 
++ pub fn timer_function(&mut self, f: F) -> Result<(), MultiError> ++ where F: FnMut(Option) -> bool + Send + 'static, ++ { ++ self._timer_function(Box::new(f)) ++ } ++ ++ fn _timer_function(&mut self, ++ f: Box) -> bool + Send>) ++ -> Result<(), MultiError> ++ { ++ self.data.timer = f; ++ let cb: curl_sys::curl_multi_timer_callback = cb; ++ try!(self.setopt_ptr(curl_sys::CURLMOPT_TIMERFUNCTION, ++ cb as usize as *const c_char)); ++ let ptr = &*self.data as *const _; ++ try!(self.setopt_ptr(curl_sys::CURLMOPT_TIMERDATA, ++ ptr as *const c_char)); ++ return Ok(()); ++ ++ // TODO: figure out how to expose `_multi` ++ extern fn cb(_multi: *mut curl_sys::CURLM, ++ timeout_ms: c_long, ++ user: *mut c_void) -> c_int { ++ let keep_going = panic::catch(|| unsafe { ++ let f = &mut (*(user as *mut MultiData)).timer; ++ if timeout_ms == -1 { ++ f(None) ++ } else { ++ f(Some(Duration::from_millis(timeout_ms as u64))) ++ } ++ }).unwrap_or(false); ++ if keep_going {0} else {-1} ++ } ++ } ++ ++ fn setopt_ptr(&mut self, ++ opt: curl_sys::CURLMoption, ++ val: *const c_char) -> Result<(), MultiError> { ++ unsafe { ++ cvt(curl_sys::curl_multi_setopt(self.raw, opt, val)) ++ } ++ } ++ ++ /// Add an easy handle to a multi session ++ /// ++ /// Adds a standard easy handle to the multi stack. This function call will ++ /// make this multi handle control the specified easy handle. ++ /// ++ /// When an easy interface is added to a multi handle, it will use a shared ++ /// connection cache owned by the multi handle. Removing and adding new easy ++ /// handles will not affect the pool of connections or the ability to do ++ /// connection re-use. ++ /// ++ /// If you have `timer_function` set in the multi handle (and you really ++ /// should if you're working event-based with `action` and friends), that ++ /// callback will be called from within this function to ask for an updated ++ /// timer so that your main event loop will get the activity on this handle ++ /// to get started. 
++ /// ++ /// The easy handle will remain added to the multi handle until you remove ++ /// it again with `remove` on the returned handle - even when a transfer ++ /// with that specific easy handle is completed. ++ pub fn add(&self, mut easy: Easy) -> Result { ++ // Clear any configuration set by previous transfers because we're ++ // moving this into a `Send+'static` situation now basically. ++ easy.transfer(); ++ ++ unsafe { ++ try!(cvt(curl_sys::curl_multi_add_handle(self.raw, easy.raw()))); ++ } ++ Ok(EasyHandle { ++ easy: easy, ++ _marker: marker::PhantomData, ++ }) ++ } ++ ++ /// Same as `add`, but works with the `Easy2` type. ++ pub fn add2(&self, easy: Easy2) -> Result, MultiError> { ++ unsafe { ++ try!(cvt(curl_sys::curl_multi_add_handle(self.raw, easy.raw()))); ++ } ++ Ok(Easy2Handle { ++ easy: easy, ++ _marker: marker::PhantomData, ++ }) ++ } ++ ++ /// Remove an easy handle from this multi session ++ /// ++ /// Removes the easy handle from this multi handle. This will make the ++ /// returned easy handle be removed from this multi handle's control. ++ /// ++ /// When the easy handle has been removed from a multi stack, it is again ++ /// perfectly legal to invoke `perform` on it. ++ /// ++ /// Removing an easy handle while being used is perfectly legal and will ++ /// effectively halt the transfer in progress involving that easy handle. ++ /// All other easy handles and transfers will remain unaffected. ++ pub fn remove(&self, easy: EasyHandle) -> Result { ++ unsafe { ++ try!(cvt(curl_sys::curl_multi_remove_handle(self.raw, ++ easy.easy.raw()))); ++ } ++ Ok(easy.easy) ++ } ++ ++ /// Same as `remove`, but for `Easy2Handle`. 
++ pub fn remove2(&self, easy: Easy2Handle) -> Result, MultiError> { ++ unsafe { ++ try!(cvt(curl_sys::curl_multi_remove_handle(self.raw, ++ easy.easy.raw()))); ++ } ++ Ok(easy.easy) ++ } ++ ++ /// Read multi stack informationals ++ /// ++ /// Ask the multi handle if there are any messages/informationals from the ++ /// individual transfers. Messages may include informationals such as an ++ /// error code from the transfer or just the fact that a transfer is ++ /// completed. More details on these should be written down as well. ++ pub fn messages(&self, mut f: F) where F: FnMut(Message) { ++ self._messages(&mut f) ++ } ++ ++ fn _messages(&self, mut f: &mut FnMut(Message)) { ++ let mut queue = 0; ++ unsafe { ++ loop { ++ let ptr = curl_sys::curl_multi_info_read(self.raw, &mut queue); ++ if ptr.is_null() { ++ break ++ } ++ f(Message { ptr: ptr, _multi: self }) ++ } ++ } ++ } ++ ++ /// Inform of reads/writes available data given an action ++ /// ++ /// When the application has detected action on a socket handled by libcurl, ++ /// it should call this function with the sockfd argument set to ++ /// the socket with the action. When the events on a socket are known, they ++ /// can be passed `events`. When the events on a socket are unknown, pass ++ /// `Events::new()` instead, and libcurl will test the descriptor ++ /// internally. ++ /// ++ /// The returned integer will contain the number of running easy handles ++ /// within the multi handle. When this number reaches zero, all transfers ++ /// are complete/done. When you call `action` on a specific socket and the ++ /// counter decreases by one, it DOES NOT necessarily mean that this exact ++ /// socket/transfer is the one that completed. Use `messages` to figure out ++ /// which easy handle that completed. 
++ /// ++ /// The `action` function informs the application about updates in the ++ /// socket (file descriptor) status by doing none, one, or multiple calls to ++ /// the socket callback function set with the `socket_function` method. They ++ /// update the status with changes since the previous time the callback was ++ /// called. ++ pub fn action(&self, socket: Socket, events: &Events) ++ -> Result { ++ let mut remaining = 0; ++ unsafe { ++ try!(cvt(curl_sys::curl_multi_socket_action(self.raw, ++ socket, ++ events.bits, ++ &mut remaining))); ++ Ok(remaining as u32) ++ } ++ } ++ ++ /// Inform libcurl that a timeout has expired and sockets should be tested. ++ /// ++ /// The returned integer will contain the number of running easy handles ++ /// within the multi handle. When this number reaches zero, all transfers ++ /// are complete/done. When you call `action` on a specific socket and the ++ /// counter decreases by one, it DOES NOT necessarily mean that this exact ++ /// socket/transfer is the one that completed. Use `messages` to figure out ++ /// which easy handle that completed. ++ /// ++ /// Get the timeout time by calling the `timer_function` method. Your ++ /// application will then get called with information on how long to wait ++ /// for socket actions at most before doing the timeout action: call the ++ /// `timeout` method. You can also use the `get_timeout` function to ++ /// poll the value at any given time, but for an event-based system using ++ /// the callback is far better than relying on polling the timeout value. 
++ pub fn timeout(&self) -> Result { ++ let mut remaining = 0; ++ unsafe { ++ try!(cvt(curl_sys::curl_multi_socket_action(self.raw, ++ curl_sys::CURL_SOCKET_BAD, ++ 0, ++ &mut remaining))); ++ Ok(remaining as u32) ++ } ++ } ++ ++ /// Get how long to wait for action before proceeding ++ /// ++ /// An application using the libcurl multi interface should call ++ /// `get_timeout` to figure out how long it should wait for socket actions - ++ /// at most - before proceeding. ++ /// ++ /// Proceeding means either doing the socket-style timeout action: call the ++ /// `timeout` function, or call `perform` if you're using the simpler and ++ /// older multi interface approach. ++ /// ++ /// The timeout value returned is the duration at this very moment. If 0, it ++ /// means you should proceed immediately without waiting for anything. If it ++ /// returns `None`, there's no timeout at all set. ++ /// ++ /// Note: if libcurl returns a `None` timeout here, it just means that ++ /// libcurl currently has no stored timeout value. You must not wait too ++ /// long (more than a few seconds perhaps) before you call `perform` again. ++ pub fn get_timeout(&self) -> Result, MultiError> { ++ let mut ms = 0; ++ unsafe { ++ try!(cvt(curl_sys::curl_multi_timeout(self.raw, &mut ms))); ++ if ms == -1 { ++ Ok(None) ++ } else { ++ Ok(Some(Duration::from_millis(ms as u64))) ++ } ++ } ++ } ++ ++ /// Block until activity is detected or a timeout passes. ++ /// ++ /// The timeout is used in millisecond-precision. Large durations are ++ /// clamped at the maximum value curl accepts. ++ /// ++ /// The returned integer will contain the number of internal file ++ /// descriptors on which interesting events occured. ++ /// ++ /// This function is a simpler alternative to using `fdset()` and `select()` ++ /// and does not suffer from file descriptor limits. 
++ /// ++ /// # Example ++ /// ++ /// ``` ++ /// use curl::multi::Multi; ++ /// use std::time::Duration; ++ /// ++ /// let m = Multi::new(); ++ /// ++ /// // Add some Easy handles... ++ /// ++ /// while m.perform().unwrap() > 0 { ++ /// m.wait(&mut [], Duration::from_secs(1)).unwrap(); ++ /// } ++ /// ``` ++ pub fn wait(&self, waitfds: &mut [WaitFd], timeout: Duration) ++ -> Result { ++ let timeout_ms = { ++ let secs = timeout.as_secs(); ++ if secs > (i32::max_value() / 1000) as u64 { ++ // Duration too large, clamp at maximum value. ++ i32::max_value() ++ } else { ++ secs as i32 * 1000 + timeout.subsec_nanos() as i32 / 1000_000 ++ } ++ }; ++ unsafe { ++ let mut ret = 0; ++ try!(cvt(curl_sys::curl_multi_wait(self.raw, ++ waitfds.as_mut_ptr() as *mut _, ++ waitfds.len() as u32, ++ timeout_ms, ++ &mut ret))); ++ Ok(ret as u32) ++ } ++ } ++ ++ /// Reads/writes available data from each easy handle. ++ /// ++ /// This function handles transfers on all the added handles that need ++ /// attention in an non-blocking fashion. ++ /// ++ /// When an application has found out there's data available for this handle ++ /// or a timeout has elapsed, the application should call this function to ++ /// read/write whatever there is to read or write right now etc. This ++ /// method returns as soon as the reads/writes are done. This function does ++ /// not require that there actually is any data available for reading or ++ /// that data can be written, it can be called just in case. It will return ++ /// the number of handles that still transfer data. ++ /// ++ /// If the amount of running handles is changed from the previous call (or ++ /// is less than the amount of easy handles you've added to the multi ++ /// handle), you know that there is one or more transfers less "running". ++ /// You can then call `info` to get information about each individual ++ /// completed transfer, and that returned info includes `Error` and more. 
++ /// If an added handle fails very quickly, it may never be counted as a ++ /// running handle. ++ /// ++ /// When running_handles is set to zero (0) on the return of this function, ++ /// there is no longer any transfers in progress. ++ /// ++ /// # Return ++ /// ++ /// Before libcurl version 7.20.0: If you receive `is_call_perform`, this ++ /// basically means that you should call `perform` again, before you select ++ /// on more actions. You don't have to do it immediately, but the return ++ /// code means that libcurl may have more data available to return or that ++ /// there may be more data to send off before it is "satisfied". Do note ++ /// that `perform` will return `is_call_perform` only when it wants to be ++ /// called again immediately. When things are fine and there is nothing ++ /// immediate it wants done, it'll return `Ok` and you need to wait for ++ /// "action" and then call this function again. ++ /// ++ /// This function only returns errors etc regarding the whole multi stack. ++ /// Problems still might have occurred on individual transfers even when ++ /// this function returns `Ok`. Use `info` to figure out how individual ++ /// transfers did. ++ pub fn perform(&self) -> Result { ++ unsafe { ++ let mut ret = 0; ++ try!(cvt(curl_sys::curl_multi_perform(self.raw, &mut ret))); ++ Ok(ret as u32) ++ } ++ } ++ ++ /// Extracts file descriptor information from a multi handle ++ /// ++ /// This function extracts file descriptor information from a given ++ /// handle, and libcurl returns its `fd_set` sets. The application can use ++ /// these to `select()` on, but be sure to `FD_ZERO` them before calling ++ /// this function as curl_multi_fdset only adds its own descriptors, it ++ /// doesn't zero or otherwise remove any others. The curl_multi_perform ++ /// function should be called as soon as one of them is ready to be read ++ /// from or written to. 
++ /// ++ /// If no file descriptors are set by libcurl, this function will return ++ /// `Ok(None)`. Otherwise `Ok(Some(n))` will be returned where `n` the ++ /// highest descriptor number libcurl set. When `Ok(None)` is returned it ++ /// is because libcurl currently does something that isn't possible for ++ /// your application to monitor with a socket and unfortunately you can ++ /// then not know exactly when the current action is completed using ++ /// `select()`. You then need to wait a while before you proceed and call ++ /// `perform` anyway. ++ /// ++ /// When doing `select()`, you should use `get_timeout` to figure out ++ /// how long to wait for action. Call `perform` even if no activity has ++ /// been seen on the `fd_set`s after the timeout expires as otherwise ++ /// internal retries and timeouts may not work as you'd think and want. ++ /// ++ /// If one of the sockets used by libcurl happens to be larger than what ++ /// can be set in an `fd_set`, which on POSIX systems means that the file ++ /// descriptor is larger than `FD_SETSIZE`, then libcurl will try to not ++ /// set it. Setting a too large file descriptor in an `fd_set` implies an out ++ /// of bounds write which can cause crashes, or worse. The effect of NOT ++ /// storing it will possibly save you from the crash, but will make your ++ /// program NOT wait for sockets it should wait for... 
++ pub fn fdset(&self, ++ read: Option<&mut fd_set>, ++ write: Option<&mut fd_set>, ++ except: Option<&mut fd_set>) -> Result, MultiError> { ++ unsafe { ++ let mut ret = 0; ++ let read = read.map(|r| r as *mut _).unwrap_or(0 as *mut _); ++ let write = write.map(|r| r as *mut _).unwrap_or(0 as *mut _); ++ let except = except.map(|r| r as *mut _).unwrap_or(0 as *mut _); ++ try!(cvt(curl_sys::curl_multi_fdset(self.raw, read, write, except, ++ &mut ret))); ++ if ret == -1 { ++ Ok(None) ++ } else { ++ Ok(Some(ret)) ++ } ++ } ++ } ++ ++ /// Attempt to close the multi handle and clean up all associated resources. ++ /// ++ /// Cleans up and removes a whole multi stack. It does not free or touch any ++ /// individual easy handles in any way - they still need to be closed ++ /// individually. ++ pub fn close(&self) -> Result<(), MultiError> { ++ unsafe { ++ cvt(curl_sys::curl_multi_cleanup(self.raw)) ++ } ++ } ++} ++ ++fn cvt(code: curl_sys::CURLMcode) -> Result<(), MultiError> { ++ if code == curl_sys::CURLM_OK { ++ Ok(()) ++ } else { ++ Err(MultiError::new(code)) ++ } ++} ++ ++impl fmt::Debug for Multi { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_struct("Multi") ++ .field("raw", &self.raw) ++ .finish() ++ } ++} ++ ++impl Drop for Multi { ++ fn drop(&mut self) { ++ let _ = self.close(); ++ } ++} ++ ++impl EasyHandle { ++ /// Sets an internal private token for this `EasyHandle`. ++ /// ++ /// This function will set the `CURLOPT_PRIVATE` field on the underlying ++ /// easy handle. 
++ pub fn set_token(&mut self, token: usize) -> Result<(), Error> { ++ unsafe { ++ ::cvt(curl_sys::curl_easy_setopt(self.easy.raw(), ++ curl_sys::CURLOPT_PRIVATE, ++ token)) ++ } ++ } ++} ++ ++impl fmt::Debug for EasyHandle { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ self.easy.fmt(f) ++ } ++} ++ ++impl Easy2Handle { ++ /// Same as `EasyHandle::set_token` ++ pub fn set_token(&mut self, token: usize) -> Result<(), Error> { ++ unsafe { ++ ::cvt(curl_sys::curl_easy_setopt(self.easy.raw(), ++ curl_sys::CURLOPT_PRIVATE, ++ token)) ++ } ++ } ++} ++ ++impl fmt::Debug for Easy2Handle { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ self.easy.fmt(f) ++ } ++} ++ ++impl<'multi> Message<'multi> { ++ /// If this message indicates that a transfer has finished, returns the ++ /// result of the transfer in `Some`. ++ /// ++ /// If the message doesn't indicate that a transfer has finished, then ++ /// `None` is returned. ++ pub fn result(&self) -> Option> { ++ unsafe { ++ if (*self.ptr).msg == curl_sys::CURLMSG_DONE { ++ Some(::cvt((*self.ptr).data as curl_sys::CURLcode)) ++ } else { ++ None ++ } ++ } ++ } ++ ++ /// Returns whether this easy message was for the specified easy handle or ++ /// not. ++ pub fn is_for(&self, handle: &EasyHandle) -> bool { ++ unsafe { (*self.ptr).easy_handle == handle.easy.raw() } ++ } ++ ++ /// Same as `is_for`, but for `Easy2Handle`. ++ pub fn is_for2(&self, handle: &Easy2Handle) -> bool { ++ unsafe { (*self.ptr).easy_handle == handle.easy.raw() } ++ } ++ ++ /// Returns the token associated with the easy handle that this message ++ /// represents a completion for. ++ /// ++ /// This function will return the token assigned with ++ /// `EasyHandle::set_token`. This reads the `CURLINFO_PRIVATE` field of the ++ /// underlying `*mut CURL`. 
++ pub fn token(&self) -> Result { ++ unsafe { ++ let mut p = 0usize; ++ try!(::cvt(curl_sys::curl_easy_getinfo((*self.ptr).easy_handle, ++ curl_sys::CURLINFO_PRIVATE, ++ &mut p))); ++ Ok(p) ++ } ++ } ++} ++ ++impl<'a> fmt::Debug for Message<'a> { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_struct("Message") ++ .field("ptr", &self.ptr) ++ .finish() ++ } ++} ++ ++impl Events { ++ /// Creates a new blank event bit mask. ++ pub fn new() -> Events { ++ Events { bits: 0 } ++ } ++ ++ /// Set or unset the whether these events indicate that input is ready. ++ pub fn input(&mut self, val: bool) -> &mut Events { ++ self.flag(curl_sys::CURL_CSELECT_IN, val) ++ } ++ ++ /// Set or unset the whether these events indicate that output is ready. ++ pub fn output(&mut self, val: bool) -> &mut Events { ++ self.flag(curl_sys::CURL_CSELECT_OUT, val) ++ } ++ ++ /// Set or unset the whether these events indicate that an error has ++ /// happened. ++ pub fn error(&mut self, val: bool) -> &mut Events { ++ self.flag(curl_sys::CURL_CSELECT_ERR, val) ++ } ++ ++ fn flag(&mut self, flag: c_int, val: bool) -> &mut Events { ++ if val { ++ self.bits |= flag; ++ } else { ++ self.bits &= !flag; ++ } ++ self ++ } ++} ++ ++impl fmt::Debug for Events { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_struct("Events") ++ .field("input", &(self.bits & curl_sys::CURL_CSELECT_IN != 0)) ++ .field("output", &(self.bits & curl_sys::CURL_CSELECT_IN != 0)) ++ .field("error", &(self.bits & curl_sys::CURL_CSELECT_IN != 0)) ++ .finish() ++ } ++} ++ ++impl SocketEvents { ++ /// Wait for incoming data. For the socket to become readable. ++ pub fn input(&self) -> bool { ++ self.bits & curl_sys::CURL_POLL_IN == curl_sys::CURL_POLL_IN ++ } ++ ++ /// Wait for outgoing data. For the socket to become writable. ++ pub fn output(&self) -> bool { ++ self.bits & curl_sys::CURL_POLL_OUT == curl_sys::CURL_POLL_OUT ++ } ++ ++ /// Wait for incoming and outgoing data. 
For the socket to become readable ++ /// or writable. ++ pub fn input_and_output(&self) -> bool { ++ self.bits & curl_sys::CURL_POLL_INOUT == curl_sys::CURL_POLL_INOUT ++ } ++ ++ /// The specified socket/file descriptor is no longer used by libcurl. ++ pub fn remove(&self) -> bool { ++ self.bits & curl_sys::CURL_POLL_REMOVE == curl_sys::CURL_POLL_REMOVE ++ } ++} ++ ++impl fmt::Debug for SocketEvents { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_struct("Events") ++ .field("input", &self.input()) ++ .field("output", &self.output()) ++ .field("remove", &self.remove()) ++ .finish() ++ } ++} ++ ++impl WaitFd { ++ /// Constructs an empty (invalid) WaitFd. ++ pub fn new() -> WaitFd { ++ WaitFd { ++ inner: curl_sys::curl_waitfd { ++ fd: 0, ++ events: 0, ++ revents: 0, ++ } ++ } ++ } ++ ++ /// Set the file descriptor to wait for. ++ pub fn set_fd(&mut self, fd: Socket) { ++ self.inner.fd = fd; ++ } ++ ++ /// Indicate that the socket should poll on read events such as new data ++ /// received. ++ /// ++ /// Corresponds to `CURL_WAIT_POLLIN`. ++ pub fn poll_on_read(&mut self, val: bool) -> &mut WaitFd { ++ self.flag(curl_sys::CURL_WAIT_POLLIN, val) ++ } ++ ++ /// Indicate that the socket should poll on high priority read events such ++ /// as out of band data. ++ /// ++ /// Corresponds to `CURL_WAIT_POLLPRI`. ++ pub fn poll_on_priority_read(&mut self, val: bool) -> &mut WaitFd { ++ self.flag(curl_sys::CURL_WAIT_POLLPRI, val) ++ } ++ ++ /// Indicate that the socket should poll on write events such as the socket ++ /// being clear to write without blocking. ++ /// ++ /// Corresponds to `CURL_WAIT_POLLOUT`. 
++ pub fn poll_on_write(&mut self, val: bool) -> &mut WaitFd { ++ self.flag(curl_sys::CURL_WAIT_POLLOUT, val) ++ } ++ ++ fn flag(&mut self, flag: c_short, val: bool) -> &mut WaitFd { ++ if val { ++ self.inner.events |= flag; ++ } else { ++ self.inner.events &= !flag; ++ } ++ self ++ } ++ ++ /// After a call to `wait`, returns `true` if `poll_on_read` was set and a ++ /// read event occured. ++ pub fn received_read(&self) -> bool { ++ self.inner.revents & curl_sys::CURL_WAIT_POLLIN == curl_sys::CURL_WAIT_POLLIN ++ } ++ ++ /// After a call to `wait`, returns `true` if `poll_on_priority_read` was set and a ++ /// priority read event occured. ++ pub fn received_priority_read(&self) -> bool { ++ self.inner.revents & curl_sys::CURL_WAIT_POLLPRI == curl_sys::CURL_WAIT_POLLPRI ++ } ++ ++ /// After a call to `wait`, returns `true` if `poll_on_write` was set and a ++ /// write event occured. ++ pub fn received_write(&self) -> bool { ++ self.inner.revents & curl_sys::CURL_WAIT_POLLOUT == curl_sys::CURL_WAIT_POLLOUT ++ } ++} ++ ++#[cfg(unix)] ++impl From for WaitFd { ++ fn from(pfd: pollfd) -> WaitFd { ++ let mut events = 0; ++ if pfd.events & POLLIN == POLLIN { ++ events |= curl_sys::CURL_WAIT_POLLIN; ++ } ++ if pfd.events & POLLPRI == POLLPRI { ++ events |= curl_sys::CURL_WAIT_POLLPRI; ++ } ++ if pfd.events & POLLOUT == POLLOUT { ++ events |= curl_sys::CURL_WAIT_POLLOUT; ++ } ++ WaitFd { ++ inner: curl_sys::curl_waitfd { ++ fd: pfd.fd, ++ events: events, ++ revents: 0, ++ } ++ } ++ } ++} ++ ++impl fmt::Debug for WaitFd { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_struct("WaitFd") ++ .field("fd", &self.inner.fd) ++ .field("events", &self.inner.fd) ++ .field("revents", &self.inner.fd) ++ .finish() ++ } ++} diff --cc vendor/curl-0.4.8/src/panic.rs index 000000000,000000000..1da217985 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/src/panic.rs @@@ -1,0 -1,0 +1,30 @@@ ++use std::any::Any; ++use std::cell::RefCell; ++use std::panic::{self, 
AssertUnwindSafe}; ++ ++thread_local!(static LAST_ERROR: RefCell>> = { ++ RefCell::new(None) ++}); ++ ++pub fn catch T>(f: F) -> Option { ++ if LAST_ERROR.with(|slot| slot.borrow().is_some()) { ++ return None ++ } ++ ++ // Note that `AssertUnwindSafe` is used here as we prevent reentering ++ // arbitrary code due to the `LAST_ERROR` check above plus propagation of a ++ // panic after we return back to user code from C. ++ match panic::catch_unwind(AssertUnwindSafe(f)) { ++ Ok(ret) => Some(ret), ++ Err(e) => { ++ LAST_ERROR.with(|slot| *slot.borrow_mut() = Some(e)); ++ None ++ } ++ } ++} ++ ++pub fn propagate() { ++ if let Some(t) = LAST_ERROR.with(|slot| slot.borrow_mut().take()) { ++ panic::resume_unwind(t) ++ } ++} diff --cc vendor/curl-0.4.8/src/version.rs index 000000000,000000000..aaa734f6a new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/src/version.rs @@@ -1,0 -1,0 +1,303 @@@ ++use std::ffi::CStr; ++use std::fmt; ++use std::str; ++ ++use curl_sys; ++use libc::{c_int, c_char}; ++ ++/// Version information about libcurl and the capabilities that it supports. ++pub struct Version { ++ inner: *mut curl_sys::curl_version_info_data, ++} ++ ++unsafe impl Send for Version {} ++unsafe impl Sync for Version {} ++ ++/// An iterator over the list of protocols a version supports. ++#[derive(Clone)] ++pub struct Protocols<'a> { ++ cur: *const *const c_char, ++ _inner: &'a Version, ++} ++ ++impl Version { ++ /// Returns the libcurl version that this library is currently linked against. ++ pub fn num() -> &'static str { ++ unsafe { ++ let s = CStr::from_ptr(curl_sys::curl_version() as *const _); ++ str::from_utf8(s.to_bytes()).unwrap() ++ } ++ } ++ ++ /// Returns the libcurl version that this library is currently linked against. 
++ pub fn get() -> Version { ++ unsafe { ++ let ptr = curl_sys::curl_version_info(curl_sys::CURLVERSION_FOURTH); ++ assert!(!ptr.is_null()); ++ Version { inner: ptr } ++ } ++ } ++ ++ /// Returns the human readable version string, ++ pub fn version(&self) -> &str { ++ unsafe { ++ ::opt_str((*self.inner).version).unwrap() ++ } ++ } ++ ++ /// Returns a numeric representation of the version number ++ /// ++ /// This is a 24 bit number made up of the major number, minor, and then ++ /// patch number. For example 7.9.8 will return 0x070908. ++ pub fn version_num(&self) -> u32 { ++ unsafe { ++ (*self.inner).version_num as u32 ++ } ++ } ++ ++ /// Returns a human readable string of the host libcurl is built for. ++ /// ++ /// This is discovered as part of the build environment. ++ pub fn host(&self) -> &str { ++ unsafe { ++ ::opt_str((*self.inner).host).unwrap() ++ } ++ } ++ ++ /// Returns whether libcurl supports IPv6 ++ pub fn feature_ipv6(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_IPV6) ++ } ++ ++ /// Returns whether libcurl supports SSL ++ pub fn feature_ssl(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_SSL) ++ } ++ ++ /// Returns whether libcurl supports HTTP deflate via libz ++ pub fn feature_libz(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_LIBZ) ++ } ++ ++ /// Returns whether libcurl supports HTTP NTLM ++ pub fn feature_ntlm(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_NTLM) ++ } ++ ++ /// Returns whether libcurl supports HTTP GSSNEGOTIATE ++ pub fn feature_gss_negotiate(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_GSSNEGOTIATE) ++ } ++ ++ /// Returns whether libcurl was built with debug capabilities ++ pub fn feature_debug(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_DEBUG) ++ } ++ ++ /// Returns whether libcurl was built with SPNEGO authentication ++ pub fn feature_spnego(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_SPNEGO) ++ } ++ ++ /// Returns whether libcurl was built with large file support ++ pub fn 
feature_largefile(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_LARGEFILE) ++ } ++ ++ /// Returns whether libcurl was built with support for IDNA, domain names ++ /// with international letters. ++ pub fn feature_idn(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_IDN) ++ } ++ ++ /// Returns whether libcurl was built with support for SSPI. ++ pub fn feature_sspi(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_SSPI) ++ } ++ ++ /// Returns whether libcurl was built with asynchronous name lookups. ++ pub fn feature_async_dns(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_ASYNCHDNS) ++ } ++ ++ /// Returns whether libcurl was built with support for character ++ /// conversions. ++ pub fn feature_conv(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_CONV) ++ } ++ ++ /// Returns whether libcurl was built with support for TLS-SRP. ++ pub fn feature_tlsauth_srp(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_TLSAUTH_SRP) ++ } ++ ++ /// Returns whether libcurl was built with support for NTLM delegation to ++ /// winbind helper. ++ pub fn feature_ntlm_wb(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_NTLM_WB) ++ } ++ ++ /// Returns whether libcurl was built with support for unix domain socket ++ pub fn feature_unix_domain_socket(&self) -> bool { ++ self.flag(curl_sys::CURL_VERSION_UNIX_SOCKETS) ++ } ++ ++ // /// Returns whether libcurl was built with support for HTTP2. ++ // pub fn feature_http2(&self) -> bool { ++ // self.flag(curl_sys::CURL_VERSION_HTTP2) ++ // } ++ fn flag(&self, flag: c_int) -> bool { ++ unsafe { ++ (*self.inner).features & flag != 0 ++ } ++ } ++ ++ /// Returns the version of OpenSSL that is used, or None if there is no SSL ++ /// support. ++ pub fn ssl_version(&self) -> Option<&str> { ++ unsafe { ++ ::opt_str((*self.inner).ssl_version) ++ } ++ } ++ ++ /// Returns the version of libz that is used, or None if there is no libz ++ /// support. 
++ pub fn libz_version(&self) -> Option<&str> { ++ unsafe { ++ ::opt_str((*self.inner).libz_version) ++ } ++ } ++ ++ /// Returns an iterator over the list of protocols that this build of ++ /// libcurl supports. ++ pub fn protocols(&self) -> Protocols { ++ unsafe { ++ Protocols { _inner: self, cur: (*self.inner).protocols } ++ } ++ } ++ ++ /// If available, the human readable version of ares that libcurl is linked ++ /// against. ++ pub fn ares_version(&self) -> Option<&str> { ++ unsafe { ++ if (*self.inner).age >= 1 { ++ ::opt_str((*self.inner).ares) ++ } else { ++ None ++ } ++ } ++ } ++ ++ /// If available, the version of ares that libcurl is linked against. ++ pub fn ares_version_num(&self) -> Option { ++ unsafe { ++ if (*self.inner).age >= 1 { ++ Some((*self.inner).ares_num as u32) ++ } else { ++ None ++ } ++ } ++ } ++ ++ /// If available, the version of libidn that libcurl is linked against. ++ pub fn libidn_version(&self) -> Option<&str> { ++ unsafe { ++ if (*self.inner).age >= 2 { ++ ::opt_str((*self.inner).libidn) ++ } else { ++ None ++ } ++ } ++ } ++ ++ /// If available, the version of iconv libcurl is linked against. ++ pub fn iconv_version_num(&self) -> Option { ++ unsafe { ++ if (*self.inner).age >= 3 { ++ Some((*self.inner).iconv_ver_num as u32) ++ } else { ++ None ++ } ++ } ++ } ++ ++ /// If available, the version of iconv libcurl is linked against. 
++ pub fn libssh_version(&self) -> Option<&str> { ++ unsafe { ++ if (*self.inner).age >= 3 { ++ ::opt_str((*self.inner).libssh_version) ++ } else { ++ None ++ } ++ } ++ } ++} ++ ++impl fmt::Debug for Version { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ let mut f = f.debug_struct("Version"); ++ f.field("version", &self.version()) ++ .field("host", &self.host()) ++ .field("feature_ipv6", &self.feature_ipv6()) ++ .field("feature_ssl", &self.feature_ssl()) ++ .field("feature_libz", &self.feature_libz()) ++ .field("feature_ntlm", &self.feature_ntlm()) ++ .field("feature_gss_negotiate", &self.feature_gss_negotiate()) ++ .field("feature_debug", &self.feature_debug()) ++ .field("feature_spnego", &self.feature_debug()) ++ .field("feature_largefile", &self.feature_debug()) ++ .field("feature_idn", &self.feature_debug()) ++ .field("feature_sspi", &self.feature_debug()) ++ .field("feature_async_dns", &self.feature_debug()) ++ .field("feature_conv", &self.feature_debug()) ++ .field("feature_tlsauth_srp", &self.feature_debug()) ++ .field("feature_ntlm_wb", &self.feature_debug()) ++ .field("feature_unix_domain_socket", &self.feature_debug()); ++ ++ if let Some(s) = self.ssl_version() { ++ f.field("ssl_version", &s); ++ } ++ if let Some(s) = self.libz_version() { ++ f.field("libz_version", &s); ++ } ++ if let Some(s) = self.ares_version() { ++ f.field("ares_version", &s); ++ } ++ if let Some(s) = self.libidn_version() { ++ f.field("libidn_version", &s); ++ } ++ if let Some(s) = self.iconv_version_num() { ++ f.field("iconv_version_num", &format!("{:x}", s)); ++ } ++ if let Some(s) = self.libssh_version() { ++ f.field("libssh_version", &s); ++ } ++ ++ f.field("protocols", &self.protocols().collect::>()); ++ ++ f.finish() ++ } ++} ++ ++impl<'a> Iterator for Protocols<'a> { ++ type Item = &'a str; ++ ++ fn next(&mut self) -> Option<&'a str> { ++ unsafe { ++ if (*self.cur).is_null() { ++ return None ++ } ++ let ret = ::opt_str(*self.cur).unwrap(); ++ self.cur = 
self.cur.offset(1); ++ Some(ret) ++ } ++ } ++} ++ ++impl<'a> fmt::Debug for Protocols<'a> { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_list() ++ .entries(self.clone()) ++ .finish() ++ } ++} diff --cc vendor/curl-0.4.8/tests/easy.rs index 000000000,000000000..e98193ac7 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/tests/easy.rs @@@ -1,0 -1,0 +1,675 @@@ ++extern crate curl; ++ ++use std::cell::{RefCell, Cell}; ++use std::io::Read; ++use std::rc::Rc; ++use std::str; ++use std::time::Duration; ++ ++macro_rules! t { ++ ($e:expr) => (match $e { ++ Ok(e) => e, ++ Err(e) => panic!("{} failed with {:?}", stringify!($e), e), ++ }) ++} ++ ++use curl::easy::{Easy, List, WriteError, ReadError, Transfer}; ++ ++use server::Server; ++mod server; ++ ++fn handle() -> Easy { ++ let mut e = Easy::new(); ++ t!(e.timeout(Duration::new(20, 0))); ++ return e ++} ++ ++fn sink(data: &[u8]) -> Result { ++ Ok(data.len()) ++} ++ ++#[test] ++fn get_smoke() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("HTTP/1.1 200 OK\r\n\r\n"); ++ ++ let mut handle = handle(); ++ t!(handle.url(&s.url("/"))); ++ t!(handle.perform()); ++} ++ ++#[test] ++fn get_path() { ++ let s = Server::new(); ++ s.receive("\ ++GET /foo HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("HTTP/1.1 200 OK\r\n\r\n"); ++ ++ let mut handle = handle(); ++ t!(handle.url(&s.url("/foo"))); ++ t!(handle.perform()); ++} ++ ++#[test] ++fn write_callback() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("HTTP/1.1 200 OK\r\n\r\nhello!"); ++ ++ let mut all = Vec::::new(); ++ { ++ let mut handle = handle(); ++ t!(handle.url(&s.url("/"))); ++ let mut handle = handle.transfer(); ++ t!(handle.write_function(|data| { ++ all.extend(data); ++ Ok(data.len()) ++ })); ++ t!(handle.perform()); ++ } ++ 
assert_eq!(all, b"hello!"); ++} ++ ++#[test] ++fn progress() { ++ let s = Server::new(); ++ s.receive("\ ++GET /foo HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("HTTP/1.1 200 OK\r\n\r\nHello!"); ++ ++ let mut hits = 0; ++ let mut dl = 0.0; ++ { ++ let mut handle = handle(); ++ t!(handle.url(&s.url("/foo"))); ++ t!(handle.progress(true)); ++ t!(handle.write_function(sink)); ++ ++ let mut handle = handle.transfer(); ++ t!(handle.progress_function(|_, a, _, _| { ++ hits += 1; ++ dl = a; ++ true ++ })); ++ t!(handle.perform()); ++ } ++ assert!(hits > 0); ++ assert_eq!(dl, 6.0); ++} ++ ++#[test] ++fn headers() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++Foo: bar\r\n\ ++Bar: baz\r\n\ ++\r\n ++Hello!"); ++ ++ let mut headers = Vec::new(); ++ { ++ let mut handle = handle(); ++ t!(handle.url(&s.url("/"))); ++ ++ let mut handle = handle.transfer(); ++ t!(handle.header_function(|h| { ++ headers.push(str::from_utf8(h).unwrap().to_string()); ++ true ++ })); ++ t!(handle.write_function(sink)); ++ t!(handle.perform()); ++ } ++ assert_eq!(headers, vec![ ++ "HTTP/1.1 200 OK\r\n".to_string(), ++ "Foo: bar\r\n".to_string(), ++ "Bar: baz\r\n".to_string(), ++ "\r\n".to_string(), ++ ]); ++} ++ ++#[test] ++fn fail_on_error() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 401 Not so good\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.fail_on_error(true)); ++ assert!(h.perform().is_err()); ++ ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 401 Not so good\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.fail_on_error(false)); ++ t!(h.perform()); ++} ++ ++#[test] ++fn port() { ++ let 
s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: localhost:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url("http://localhost/")); ++ t!(h.port(s.addr().port())); ++ t!(h.perform()); ++} ++ ++#[test] ++fn proxy() { ++ let s = Server::new(); ++ s.receive("\ ++GET http://example.com/ HTTP/1.1\r\n\ ++Host: example.com\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url("http://example.com/")); ++ t!(h.proxy(&s.url("/"))); ++ t!(h.perform()); ++} ++ ++#[test] ++#[ignore] // fails on newer curl versions? seems benign ++fn noproxy() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.proxy(&s.url("/"))); ++ t!(h.noproxy("127.0.0.1")); ++ t!(h.perform()); ++} ++ ++#[test] ++fn misc() { ++ let mut h = handle(); ++ t!(h.tcp_nodelay(true)); ++ // t!(h.tcp_keepalive(true)); ++ // t!(h.tcp_keepidle(Duration::new(3, 0))); ++ // t!(h.tcp_keepintvl(Duration::new(3, 0))); ++ t!(h.buffer_size(10)); ++ t!(h.dns_cache_timeout(Duration::new(1, 0))); ++} ++ ++#[test] ++fn userpass() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Authorization: Basic YmFyOg==\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.username("foo")); ++ t!(h.username("bar")); ++ t!(h.perform()); ++} ++ ++#[test] ++fn accept_encoding() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++Accept-Encoding: gzip\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.accept_encoding("gzip")); ++ 
t!(h.perform()); ++} ++ ++#[test] ++fn follow_location() { ++ let s1 = Server::new(); ++ let s2 = Server::new(); ++ s1.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s1.send(&format!("\ ++HTTP/1.1 301 Moved Permanently\r\n\ ++Location: http://{}/foo\r\n\ ++\r\n", s2.addr())); ++ ++ s2.receive("\ ++GET /foo HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s2.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s1.url("/"))); ++ t!(h.follow_location(true)); ++ t!(h.perform()); ++} ++ ++#[test] ++fn put() { ++ let s = Server::new(); ++ s.receive("\ ++PUT / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++Content-Length: 5\r\n\ ++\r\n\ ++data\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut data = "data\n".as_bytes(); ++ let mut list = List::new(); ++ t!(list.append("Expect:")); ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.put(true)); ++ t!(h.in_filesize(5)); ++ t!(h.upload(true)); ++ t!(h.http_headers(list)); ++ let mut h = h.transfer(); ++ t!(h.read_function(|buf| { ++ Ok(data.read(buf).unwrap()) ++ })); ++ t!(h.perform()); ++} ++ ++#[test] ++fn post1() { ++ let s = Server::new(); ++ s.receive("\ ++POST / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++Content-Length: 5\r\n\ ++Content-Type: application/x-www-form-urlencoded\r\n\ ++\r\n\ ++data\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.post(true)); ++ t!(h.post_fields_copy(b"data\n")); ++ t!(h.perform()); ++} ++ ++#[test] ++fn post2() { ++ let s = Server::new(); ++ s.receive("\ ++POST / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++Content-Length: 5\r\n\ ++Content-Type: application/x-www-form-urlencoded\r\n\ ++\r\n\ ++data\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.post(true)); ++ 
t!(h.post_fields_copy(b"data\n")); ++ t!(h.write_function(sink)); ++ t!(h.perform()); ++} ++ ++#[test] ++fn post3() { ++ let s = Server::new(); ++ s.receive("\ ++POST / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++Content-Length: 5\r\n\ ++Content-Type: application/x-www-form-urlencoded\r\n\ ++\r\n\ ++data\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut data = "data\n".as_bytes(); ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.post(true)); ++ t!(h.post_field_size(5)); ++ let mut h = h.transfer(); ++ t!(h.read_function(|buf| { ++ Ok(data.read(buf).unwrap()) ++ })); ++ t!(h.perform()); ++} ++ ++#[test] ++fn referer() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++Referer: foo\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.referer("foo")); ++ t!(h.perform()); ++} ++ ++#[test] ++fn useragent() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++User-Agent: foo\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.useragent("foo")); ++ t!(h.perform()); ++} ++ ++#[test] ++fn custom_headers() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Foo: bar\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut custom = List::new(); ++ t!(custom.append("Foo: bar")); ++ t!(custom.append("Accept:")); ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.http_headers(custom)); ++ t!(h.perform()); ++} ++ ++#[test] ++fn cookie() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++Cookie: foo\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ 
t!(h.cookie("foo")); ++ t!(h.perform()); ++} ++ ++#[test] ++fn url_encoding() { ++ let mut h = handle(); ++ assert_eq!(h.url_encode(b"foo"), "foo"); ++ assert_eq!(h.url_encode(b"foo bar"), "foo%20bar"); ++ assert_eq!(h.url_encode(b"foo bar\xff"), "foo%20bar%FF"); ++ assert_eq!(h.url_encode(b""), ""); ++ assert_eq!(h.url_decode("foo"), b"foo"); ++ assert_eq!(h.url_decode("foo%20bar"), b"foo bar"); ++ assert_eq!(h.url_decode("foo%2"), b"foo%2"); ++ assert_eq!(h.url_decode("foo%xx"), b"foo%xx"); ++ assert_eq!(h.url_decode("foo%ff"), b"foo\xff"); ++ assert_eq!(h.url_decode(""), b""); ++} ++ ++#[test] ++fn getters() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.cookie_file("/dev/null")); ++ t!(h.perform()); ++ assert_eq!(t!(h.response_code()), 200); ++ assert_eq!(t!(h.redirect_count()), 0); ++ assert_eq!(t!(h.redirect_url()), None); ++ assert_eq!(t!(h.content_type()), None); ++ ++ let addr = format!("http://{}/", s.addr()); ++ assert_eq!(t!(h.effective_url()), Some(&addr[..])); ++ ++ // TODO: test this ++ // let cookies = t!(h.cookies()).iter() ++ // .map(|s| s.to_vec()) ++ // .collect::>(); ++ // assert_eq!(cookies.len(), 1); ++} ++ ++#[test] ++#[should_panic] ++fn panic_in_callback() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.header_function(|_| panic!())); ++ t!(h.perform()); ++} ++ ++#[test] ++fn abort_read() { ++ let s = Server::new(); ++ s.receive("\ ++PUT / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++Content-Length: 2\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.read_function(|_| 
Err(ReadError::Abort))); ++ t!(h.put(true)); ++ t!(h.in_filesize(2)); ++ let mut list = List::new(); ++ t!(list.append("Expect:")); ++ t!(h.http_headers(list)); ++ let err = h.perform().unwrap_err(); ++ assert!(err.is_aborted_by_callback()); ++} ++ ++#[test] ++fn pause_write_then_resume() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n ++a\n ++b"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.progress(true)); ++ ++ struct State<'a, 'b> { ++ paused: Cell, ++ unpaused: Cell, ++ transfer: RefCell>, ++ } ++ ++ let h = Rc::new(State { ++ paused: Cell::new(false), ++ unpaused: Cell::new(false), ++ transfer: RefCell::new(h.transfer()), ++ }); ++ ++ let h2 = h.clone(); ++ t!(h.transfer.borrow_mut().write_function(move |data| { ++ if h2.unpaused.get() { ++ h2.unpaused.set(false); ++ Ok(data.len()) ++ } else { ++ h2.paused.set(true); ++ Err(WriteError::Pause) ++ } ++ })); ++ let h2 = h.clone(); ++ t!(h.transfer.borrow_mut().progress_function(move |_, _, _, _| { ++ if h2.paused.get() { ++ h2.paused.set(false); ++ h2.unpaused.set(true); ++ t!(h2.transfer.borrow().unpause_write()); ++ } ++ true ++ })); ++ t!(h.transfer.borrow().perform()); ++} ++ ++#[test] ++fn perform_in_perform_is_bad() { ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n ++a\n ++b"); ++ ++ let mut h = handle(); ++ t!(h.url(&s.url("/"))); ++ t!(h.progress(true)); ++ ++ let h = Rc::new(RefCell::new(h.transfer())); ++ ++ let h2 = h.clone(); ++ t!(h.borrow_mut().write_function(move |data| { ++ assert!(h2.borrow().perform().is_err()); ++ Ok(data.len()) ++ })); ++ t!(h.borrow().perform()); ++} ++ ++// Stupid test to check if unix_socket is callable ++#[test] ++fn check_unix_socket() { ++ let mut h = handle(); ++ h.unix_socket("/var/something.socks").is_ok(); ++} ++ 
diff --cc vendor/curl-0.4.8/tests/formdata index 000000000,000000000..ce0136250 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/tests/formdata @@@ -1,0 -1,0 +1,1 @@@ ++hello diff --cc vendor/curl-0.4.8/tests/multi.rs index 000000000,000000000..4238b73b4 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/tests/multi.rs @@@ -1,0 -1,0 +1,252 @@@ ++#![cfg(unix)] ++ ++extern crate curl; ++extern crate mio; ++ ++use std::collections::HashMap; ++use std::io::{Read, Cursor}; ++use std::time::Duration; ++ ++use curl::easy::{Easy, List}; ++use curl::multi::Multi; ++ ++macro_rules! t { ++ ($e:expr) => (match $e { ++ Ok(e) => e, ++ Err(e) => panic!("{} failed with {:?}", stringify!($e), e), ++ }) ++} ++ ++use server::Server; ++mod server; ++ ++#[test] ++fn smoke() { ++ let m = Multi::new(); ++ let mut e = Easy::new(); ++ ++ let s = Server::new(); ++ s.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s.send("HTTP/1.1 200 OK\r\n\r\n"); ++ ++ t!(e.url(&s.url("/"))); ++ let _e = t!(m.add(e)); ++ while t!(m.perform()) > 0 { ++ t!(m.wait(&mut [], Duration::from_secs(1))); ++ } ++} ++ ++#[test] ++fn smoke2() { ++ let m = Multi::new(); ++ ++ let s1 = Server::new(); ++ s1.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s1.send("HTTP/1.1 200 OK\r\n\r\n"); ++ ++ let s2 = Server::new(); ++ s2.receive("\ ++GET / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++\r\n"); ++ s2.send("HTTP/1.1 200 OK\r\n\r\n"); ++ ++ let mut e1 = Easy::new(); ++ t!(e1.url(&s1.url("/"))); ++ let _e1 = t!(m.add(e1)); ++ let mut e2 = Easy::new(); ++ t!(e2.url(&s2.url("/"))); ++ let _e2 = t!(m.add(e2)); ++ ++ while t!(m.perform()) > 0 { ++ t!(m.wait(&mut [], Duration::from_secs(1))); ++ } ++ ++ let mut done = 0; ++ m.messages(|msg| { ++ msg.result().unwrap().unwrap(); ++ done += 1; ++ }); ++ assert_eq!(done, 2); ++} ++ ++#[test] ++fn upload_lots() { ++ use curl::multi::{Socket, SocketEvents, 
Events}; ++ ++ #[derive(Debug)] ++ enum Message { ++ Timeout(Option), ++ Wait(Socket, SocketEvents, usize), ++ } ++ ++ let mut m = Multi::new(); ++ let poll = t!(mio::Poll::new()); ++ let (tx, rx) = mio::channel::channel(); ++ let tx2 = tx.clone(); ++ t!(m.socket_function(move |socket, events, token| { ++ t!(tx2.send(Message::Wait(socket, events, token))); ++ })); ++ t!(m.timer_function(move |dur| { ++ t!(tx.send(Message::Timeout(dur))); ++ true ++ })); ++ ++ let s = Server::new(); ++ s.receive(&format!("\ ++PUT / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++Content-Length: 131072\r\n\ ++\r\n\ ++{}\n", vec!["a"; 128 * 1024 - 1].join(""))); ++ s.send("\ ++HTTP/1.1 200 OK\r\n\ ++\r\n"); ++ ++ let mut data = vec![b'a'; 128 * 1024 - 1]; ++ data.push(b'\n'); ++ let mut data = Cursor::new(data); ++ let mut list = List::new(); ++ t!(list.append("Expect:")); ++ let mut h = Easy::new(); ++ t!(h.url(&s.url("/"))); ++ t!(h.put(true)); ++ t!(h.read_function(move |buf| { ++ Ok(data.read(buf).unwrap()) ++ })); ++ t!(h.in_filesize(128 * 1024)); ++ t!(h.upload(true)); ++ t!(h.http_headers(list)); ++ ++ t!(poll.register(&rx, ++ mio::Token(0), ++ mio::Ready::all(), ++ mio::PollOpt::level())); ++ ++ let e = t!(m.add(h)); ++ ++ assert!(t!(m.perform()) > 0); ++ let mut next_token = 1; ++ let mut token_map = HashMap::new(); ++ let mut cur_timeout = None; ++ let mut events = mio::Events::with_capacity(128); ++ let mut running = true; ++ ++ while running { ++ let n = t!(poll.poll(&mut events, cur_timeout)); ++ ++ if n == 0 { ++ if t!(m.timeout()) == 0 { ++ running = false; ++ } ++ } ++ ++ for event in events.iter() { ++ while event.token() == mio::Token(0) { ++ match rx.try_recv() { ++ Ok(Message::Timeout(dur)) => cur_timeout = dur, ++ Ok(Message::Wait(socket, events, token)) => { ++ let evented = mio::unix::EventedFd(&socket); ++ if events.remove() { ++ token_map.remove(&token).unwrap(); ++ } else { ++ let mut e = mio::Ready::none(); ++ if events.input() { ++ e = e | 
mio::Ready::readable(); ++ } ++ if events.output() { ++ e = e | mio::Ready::writable(); ++ } ++ if token == 0 { ++ let token = next_token; ++ next_token += 1; ++ t!(m.assign(socket, token)); ++ token_map.insert(token, socket); ++ t!(poll.register(&evented, ++ mio::Token(token), ++ e, ++ mio::PollOpt::level())); ++ } else { ++ t!(poll.reregister(&evented, ++ mio::Token(token), ++ e, ++ mio::PollOpt::level())); ++ } ++ } ++ } ++ Err(_) => break, ++ } ++ } ++ ++ if event.token() == mio::Token(0) { ++ continue ++ } ++ ++ let token = event.token(); ++ let socket = token_map[&token.into()]; ++ let mut e = Events::new(); ++ if event.kind().is_readable() { ++ e.input(true); ++ } ++ if event.kind().is_writable() { ++ e.output(true); ++ } ++ if event.kind().is_error() { ++ e.error(true); ++ } ++ let remaining = t!(m.action(socket, &e)); ++ if remaining == 0 { ++ running = false; ++ } ++ } ++ } ++ ++ let mut done = 0; ++ m.messages(|m| { ++ m.result().unwrap().unwrap(); ++ done += 1; ++ }); ++ assert_eq!(done, 1); ++ ++ let mut e = t!(m.remove(e)); ++ assert_eq!(t!(e.response_code()), 200); ++} ++ ++// Tests passing raw file descriptors to Multi::wait. The test is limited to Linux only as the ++// semantics of the underlying poll(2) system call used by curl apparently differ on other ++// platforms, making the test fail. 
++#[cfg(target_os = "linux")] ++#[test] ++fn waitfds() { ++ use std::fs::File; ++ use std::os::unix::io::AsRawFd; ++ use curl::multi::WaitFd; ++ ++ let filenames = ["/dev/null", "/dev/zero", "/dev/urandom"]; ++ let files: Vec = filenames.iter() ++ .map(|filename| File::open(filename).unwrap()) ++ .collect(); ++ let mut waitfds: Vec = files.iter().map(|f| { ++ let mut waitfd = WaitFd::new(); ++ waitfd.set_fd(f.as_raw_fd()); ++ waitfd.poll_on_read(true); ++ waitfd ++ }).collect(); ++ ++ let m = Multi::new(); ++ let events = t!(m.wait(&mut waitfds, Duration::from_secs(1))); ++ assert_eq!(events, 3); ++ for waitfd in waitfds { ++ assert!(waitfd.received_read()); ++ } ++} diff --cc vendor/curl-0.4.8/tests/post.rs index 000000000,000000000..8b3413b24 new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/tests/post.rs @@@ -1,0 -1,0 +1,108 @@@ ++extern crate curl; ++ ++use std::str; ++use std::time::Duration; ++ ++macro_rules! t { ++ ($e:expr) => (match $e { ++ Ok(e) => e, ++ Err(e) => panic!("{} failed with {:?}", stringify!($e), e), ++ }) ++} ++ ++use curl::easy::{Easy, Form}; ++ ++use server::Server; ++mod server; ++ ++fn handle() -> Easy { ++ let mut e = Easy::new(); ++ t!(e.timeout(Duration::new(20, 0))); ++ return e ++} ++ ++#[test] ++fn custom() { ++ let s = Server::new(); ++ s.receive("\ ++POST / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++Content-Length: 142\r\n\ ++Expect: 100-continue\r\n\ ++Content-Type: multipart/form-data; boundary=--[..]\r\n\ ++\r\n\ ++--[..]\r\n\ ++Content-Disposition: form-data; name=\"foo\"\r\n\ ++\r\n\ ++1234\r\n\ ++--[..]\r\n"); ++ s.send("HTTP/1.1 200 OK\r\n\r\n"); ++ ++ let mut handle = handle(); ++ let mut form = Form::new(); ++ t!(form.part("foo").contents(b"1234").add()); ++ t!(handle.url(&s.url("/"))); ++ t!(handle.httppost(form)); ++ t!(handle.perform()); ++} ++ ++#[test] ++fn buffer() { ++ let s = Server::new(); ++ s.receive("\ ++POST / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ 
++Content-Length: 181\r\n\ ++Expect: 100-continue\r\n\ ++Content-Type: multipart/form-data; boundary=--[..]\r\n\ ++\r\n\ ++--[..]\r\n\ ++Content-Disposition: form-data; name=\"foo\"; filename=\"bar\"\r\n\ ++Content-Type: foo/bar\r\n\ ++\r\n\ ++1234\r\n\ ++--[..]\r\n"); ++ s.send("HTTP/1.1 200 OK\r\n\r\n"); ++ ++ let mut handle = handle(); ++ let mut form = Form::new(); ++ t!(form.part("foo") ++ .buffer("bar", b"1234".to_vec()) ++ .content_type("foo/bar") ++ .add()); ++ t!(handle.url(&s.url("/"))); ++ t!(handle.httppost(form)); ++ t!(handle.perform()); ++} ++ ++#[test] ++fn file() { ++ let s = Server::new(); ++ s.receive("\ ++POST / HTTP/1.1\r\n\ ++Host: 127.0.0.1:$PORT\r\n\ ++Accept: */*\r\n\ ++Content-Length: 205\r\n\ ++Expect: 100-continue\r\n\ ++Content-Type: multipart/form-data; boundary=--[..]\r\n\ ++\r\n\ ++--[..]\r\n\ ++Content-Disposition: form-data; name=\"foo\"; filename=\"formdata\"\r\n\ ++Content-Type: application/octet-stream\r\n\ ++\r\n\ ++hello\n\ ++\r\n\ ++--[..]\r\n"); ++ s.send("HTTP/1.1 200 OK\r\n\r\n"); ++ ++ let mut handle = handle(); ++ let mut form = Form::new(); ++ t!(form.part("foo") ++ .file("tests/formdata") ++ .add()); ++ t!(handle.url(&s.url("/"))); ++ t!(handle.httppost(form)); ++ t!(handle.perform()); ++} diff --cc vendor/curl-0.4.8/tests/server/mod.rs index 000000000,000000000..445cf901e new file mode 100644 --- /dev/null +++ b/vendor/curl-0.4.8/tests/server/mod.rs @@@ -1,0 -1,0 +1,175 @@@ ++#![allow(dead_code)] ++ ++use std::collections::HashSet; ++use std::net::{TcpListener, SocketAddr, TcpStream}; ++use std::io::prelude::*; ++use std::thread; ++use std::sync::mpsc::{Sender, Receiver, channel}; ++use std::io::BufReader; ++ ++pub struct Server { ++ messages: Option>, ++ addr: SocketAddr, ++ thread: Option>, ++} ++ ++enum Message { ++ Read(String), ++ Write(String), ++} ++ ++fn run(listener: &TcpListener, rx: &Receiver) { ++ let mut socket = BufReader::new(listener.accept().unwrap().0); ++ for msg in rx.iter() { ++ match msg { ++ 
Message::Read(ref expected) => { ++ let mut expected = &expected[..]; ++ let mut expected_headers = HashSet::new(); ++ while let Some(i) = expected.find("\n") { ++ let line = &expected[..i + 1]; ++ expected = &expected[i + 1..]; ++ expected_headers.insert(line); ++ if line == "\r\n" { ++ break ++ } ++ } ++ ++ let mut expected_len = None; ++ while expected_headers.len() > 0 { ++ let mut actual = String::new(); ++ t!(socket.read_line(&mut actual)); ++ if actual.starts_with("Content-Length") { ++ let len = actual.split(": ").skip(1).next().unwrap(); ++ expected_len = len.trim().parse().ok(); ++ } ++ // various versions of libcurl do different things here ++ if actual == "Proxy-Connection: Keep-Alive\r\n" { ++ continue ++ } ++ if expected_headers.remove(&actual[..]) { ++ continue ++ } ++ ++ let mut found = None; ++ for header in expected_headers.iter() { ++ if lines_match(header, &actual) { ++ found = Some(header.clone()); ++ break ++ } ++ } ++ if let Some(found) = found { ++ expected_headers.remove(&found); ++ continue ++ } ++ panic!("unexpected header: {:?} (remaining headers {:?})", ++ actual, expected_headers); ++ } ++ for header in expected_headers { ++ panic!("expected header but not found: {:?}", header); ++ } ++ ++ let mut line = String::new(); ++ let mut socket = match expected_len { ++ Some(amt) => socket.by_ref().take(amt), ++ None => socket.by_ref().take(expected.len() as u64), ++ }; ++ while socket.limit() > 0 { ++ line.truncate(0); ++ t!(socket.read_line(&mut line)); ++ if line.len() == 0 { ++ break ++ } ++ if expected.len() == 0 { ++ panic!("unexpected line: {:?}", line); ++ } ++ let i = expected.find("\n").unwrap_or(expected.len() - 1); ++ let expected_line = &expected[..i + 1]; ++ expected = &expected[i + 1..]; ++ if lines_match(expected_line, &line) { ++ continue ++ } ++ panic!("lines didn't match:\n\ ++ expected: {:?}\n\ ++ actual: {:?}\n", expected_line, line) ++ } ++ if expected.len() != 0 { ++ println!("didn't get expected data: {:?}", expected); 
++ } ++ } ++ Message::Write(ref to_write) => { ++ t!(socket.get_mut().write_all(to_write.as_bytes())); ++ return ++ } ++ } ++ } ++ ++ let mut dst = Vec::new(); ++ t!(socket.read_to_end(&mut dst)); ++ assert!(dst.len() == 0); ++} ++ ++fn lines_match(expected: &str, mut actual: &str) -> bool { ++ for (i, part) in expected.split("[..]").enumerate() { ++ match actual.find(part) { ++ Some(j) => { ++ if i == 0 && j != 0 { ++ return false ++ } ++ actual = &actual[j + part.len()..]; ++ } ++ None => { ++ return false ++ } ++ } ++ } ++ actual.is_empty() || expected.ends_with("[..]") ++} ++ ++impl Server { ++ pub fn new() -> Server { ++ let listener = t!(TcpListener::bind("127.0.0.1:0")); ++ let addr = t!(listener.local_addr()); ++ let (tx, rx) = channel(); ++ let thread = thread::spawn(move || run(&listener, &rx)); ++ Server { ++ messages: Some(tx), ++ addr: addr, ++ thread: Some(thread), ++ } ++ } ++ ++ pub fn receive(&self, msg: &str) { ++ let msg = msg.replace("$PORT", &self.addr.port().to_string()); ++ self.msg(Message::Read(msg)); ++ } ++ ++ pub fn send(&self, msg: &str) { ++ let msg = msg.replace("$PORT", &self.addr.port().to_string()); ++ self.msg(Message::Write(msg)); ++ } ++ ++ fn msg(&self, msg: Message) { ++ t!(self.messages.as_ref().unwrap().send(msg)); ++ } ++ ++ pub fn addr(&self) -> &SocketAddr { ++ &self.addr ++ } ++ ++ pub fn url(&self, path: &str) -> String { ++ format!("http://{}{}", self.addr, path) ++ } ++} ++ ++impl Drop for Server { ++ fn drop(&mut self) { ++ drop(TcpStream::connect(&self.addr)); ++ drop(self.messages.take()); ++ let res = self.thread.take().unwrap().join(); ++ if !thread::panicking() { ++ t!(res); ++ } else if let Err(e) = res { ++ println!("child server thread also failed: {:?}", e); ++ } ++ } ++} diff --cc vendor/custom_derive-0.1.7/.cargo-checksum.json index 000000000,000000000..34b8aaf67 new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ 
++{"files":{},"package":"ef8ae57c4978a2acd8b869ce6b9ca1dfe817bff704c220209fdef2c0b75a01b9"} diff --cc vendor/custom_derive-0.1.7/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/.cargo-ok diff --cc vendor/custom_derive-0.1.7/Cargo.toml index 000000000,000000000..610fdd9b2 new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/Cargo.toml @@@ -1,0 -1,0 +1,33 @@@ ++[package] ++name = "custom_derive" ++version = "0.1.7" ++authors = ["Daniel Keep "] ++ ++description = "(Note: superseded by `macro-attr`) This crate provides a macro that enables the use of custom derive attributes." ++repository = "https://github.com/DanielKeep/rust-custom-derive/tree/custom_derive-master" ++documentation = "https://docs.rs/crate/custom_derive/" ++license = "MIT/Apache-2.0" ++keywords = ["custom", "derive", "macro"] ++ ++exclude = [ ++ ".cargo/*", ++ "local/*", ++ "doc-pkg/*", ++ "scripts/*", ++ ".gitmodules", ++ ".travis.yml", ++ "update-docs.py", ++] ++ ++[workspace] ++members = [ ++ "enum_derive", ++ "newtype_derive", ++] ++ ++[features] ++default = ["std"] ++std = [] ++ ++[dev-dependencies] ++rustc-serialize = "0.3.15" diff --cc vendor/custom_derive-0.1.7/LICENSE index 000000000,000000000..6843892c9 new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/LICENSE @@@ -1,0 -1,0 +1,237 @@@ ++Copyright ⓒ 2015, 2016 rust-custom-derive contributors. ++ ++Licensed under either of: ++ ++* MIT license, or ++* Apache License, Version 2.0 ++ ++at your option. ++ ++Unless you explicitly state otherwise, any contribution intentionally ++submitted for inclusion in the work by you shall be dual licensed as ++above, without any additional terms or conditions. 
++ ++# MIT License ++ ++Permission is hereby granted, free of charge, to any person obtaining ++a copy of this software and associated documentation files (the ++"Software"), to deal in the Software without restriction, including ++without limitation the rights to use, copy, modify, merge, publish, ++distribute, sublicense, and/or sell copies of the Software, and to ++permit persons to whom the Software is furnished to do so, subject ++to the following conditions: ++ ++The above copyright notice and this permission notice shall be included ++in all copies or substantial portions of the Software. ++ ++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS ++OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ++FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL ++THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR ++OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ++ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR ++OTHER DEALINGS IN THE SOFTWARE. ++ ++# Apache License, Version 2.0 ++ ++ Apache License ++ Version 2.0, January 2004 ++ http://www.apache.org/licenses/ ++ ++TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION ++ ++1. Definitions. ++ ++ "License" shall mean the terms and conditions for use, reproduction, ++ and distribution as defined by Sections 1 through 9 of this document. ++ ++ "Licensor" shall mean the copyright owner or entity authorized by ++ the copyright owner that is granting the License. ++ ++ "Legal Entity" shall mean the union of the acting entity and all ++ other entities that control, are controlled by, or are under common ++ control with that entity. 
For the purposes of this definition, ++ "control" means (i) the power, direct or indirect, to cause the ++ direction or management of such entity, whether by contract or ++ otherwise, or (ii) ownership of fifty percent (50%) or more of the ++ outstanding shares, or (iii) beneficial ownership of such entity. ++ ++ "You" (or "Your") shall mean an individual or Legal Entity ++ exercising permissions granted by this License. ++ ++ "Source" form shall mean the preferred form for making modifications, ++ including but not limited to software source code, documentation ++ source, and configuration files. ++ ++ "Object" form shall mean any form resulting from mechanical ++ transformation or translation of a Source form, including but ++ not limited to compiled object code, generated documentation, ++ and conversions to other media types. ++ ++ "Work" shall mean the work of authorship, whether in Source or ++ Object form, made available under the License, as indicated by a ++ copyright notice that is included in or attached to the work ++ (an example is provided in the Appendix below). ++ ++ "Derivative Works" shall mean any work, whether in Source or Object ++ form, that is based on (or derived from) the Work and for which the ++ editorial revisions, annotations, elaborations, or other modifications ++ represent, as a whole, an original work of authorship. For the purposes ++ of this License, Derivative Works shall not include works that remain ++ separable from, or merely link (or bind by name) to the interfaces of, ++ the Work and Derivative Works thereof. ++ ++ "Contribution" shall mean any work of authorship, including ++ the original version of the Work and any modifications or additions ++ to that Work or Derivative Works thereof, that is intentionally ++ submitted to Licensor for inclusion in the Work by the copyright owner ++ or by an individual or Legal Entity authorized to submit on behalf of ++ the copyright owner. 
For the purposes of this definition, "submitted" ++ means any form of electronic, verbal, or written communication sent ++ to the Licensor or its representatives, including but not limited to ++ communication on electronic mailing lists, source code control systems, ++ and issue tracking systems that are managed by, or on behalf of, the ++ Licensor for the purpose of discussing and improving the Work, but ++ excluding communication that is conspicuously marked or otherwise ++ designated in writing by the copyright owner as "Not a Contribution." ++ ++ "Contributor" shall mean Licensor and any individual or Legal Entity ++ on behalf of whom a Contribution has been received by Licensor and ++ subsequently incorporated within the Work. ++ ++2. Grant of Copyright License. Subject to the terms and conditions of ++ this License, each Contributor hereby grants to You a perpetual, ++ worldwide, non-exclusive, no-charge, royalty-free, irrevocable ++ copyright license to reproduce, prepare Derivative Works of, ++ publicly display, publicly perform, sublicense, and distribute the ++ Work and such Derivative Works in Source or Object form. ++ ++3. Grant of Patent License. Subject to the terms and conditions of ++ this License, each Contributor hereby grants to You a perpetual, ++ worldwide, non-exclusive, no-charge, royalty-free, irrevocable ++ (except as stated in this section) patent license to make, have made, ++ use, offer to sell, sell, import, and otherwise transfer the Work, ++ where such license applies only to those patent claims licensable ++ by such Contributor that are necessarily infringed by their ++ Contribution(s) alone or by combination of their Contribution(s) ++ with the Work to which such Contribution(s) was submitted. 
If You ++ institute patent litigation against any entity (including a ++ cross-claim or counterclaim in a lawsuit) alleging that the Work ++ or a Contribution incorporated within the Work constitutes direct ++ or contributory patent infringement, then any patent licenses ++ granted to You under this License for that Work shall terminate ++ as of the date such litigation is filed. ++ ++4. Redistribution. You may reproduce and distribute copies of the ++ Work or Derivative Works thereof in any medium, with or without ++ modifications, and in Source or Object form, provided that You ++ meet the following conditions: ++ ++ (a) You must give any other recipients of the Work or ++ Derivative Works a copy of this License; and ++ ++ (b) You must cause any modified files to carry prominent notices ++ stating that You changed the files; and ++ ++ (c) You must retain, in the Source form of any Derivative Works ++ that You distribute, all copyright, patent, trademark, and ++ attribution notices from the Source form of the Work, ++ excluding those notices that do not pertain to any part of ++ the Derivative Works; and ++ ++ (d) If the Work includes a "NOTICE" text file as part of its ++ distribution, then any Derivative Works that You distribute must ++ include a readable copy of the attribution notices contained ++ within such NOTICE file, excluding those notices that do not ++ pertain to any part of the Derivative Works, in at least one ++ of the following places: within a NOTICE text file distributed ++ as part of the Derivative Works; within the Source form or ++ documentation, if provided along with the Derivative Works; or, ++ within a display generated by the Derivative Works, if and ++ wherever such third-party notices normally appear. The contents ++ of the NOTICE file are for informational purposes only and ++ do not modify the License. 
You may add Your own attribution ++ notices within Derivative Works that You distribute, alongside ++ or as an addendum to the NOTICE text from the Work, provided ++ that such additional attribution notices cannot be construed ++ as modifying the License. ++ ++ You may add Your own copyright statement to Your modifications and ++ may provide additional or different license terms and conditions ++ for use, reproduction, or distribution of Your modifications, or ++ for any such Derivative Works as a whole, provided Your use, ++ reproduction, and distribution of the Work otherwise complies with ++ the conditions stated in this License. ++ ++5. Submission of Contributions. Unless You explicitly state otherwise, ++ any Contribution intentionally submitted for inclusion in the Work ++ by You to the Licensor shall be under the terms and conditions of ++ this License, without any additional terms or conditions. ++ Notwithstanding the above, nothing herein shall supersede or modify ++ the terms of any separate license agreement you may have executed ++ with Licensor regarding such Contributions. ++ ++6. Trademarks. This License does not grant permission to use the trade ++ names, trademarks, service marks, or product names of the Licensor, ++ except as required for reasonable and customary use in describing the ++ origin of the Work and reproducing the content of the NOTICE file. ++ ++7. Disclaimer of Warranty. Unless required by applicable law or ++ agreed to in writing, Licensor provides the Work (and each ++ Contributor provides its Contributions) on an "AS IS" BASIS, ++ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or ++ implied, including, without limitation, any warranties or conditions ++ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A ++ PARTICULAR PURPOSE. 
You are solely responsible for determining the ++ appropriateness of using or redistributing the Work and assume any ++ risks associated with Your exercise of permissions under this License. ++ ++8. Limitation of Liability. In no event and under no legal theory, ++ whether in tort (including negligence), contract, or otherwise, ++ unless required by applicable law (such as deliberate and grossly ++ negligent acts) or agreed to in writing, shall any Contributor be ++ liable to You for damages, including any direct, indirect, special, ++ incidental, or consequential damages of any character arising as a ++ result of this License or out of the use or inability to use the ++ Work (including but not limited to damages for loss of goodwill, ++ work stoppage, computer failure or malfunction, or any and all ++ other commercial damages or losses), even if such Contributor ++ has been advised of the possibility of such damages. ++ ++9. Accepting Warranty or Additional Liability. While redistributing ++ the Work or Derivative Works thereof, You may choose to offer, ++ and charge a fee for, acceptance of support, warranty, indemnity, ++ or other liability obligations and/or rights consistent with this ++ License. However, in accepting such obligations, You may act only ++ on Your own behalf and on Your sole responsibility, not on behalf ++ of any other Contributor, and only if You agree to indemnify, ++ defend, and hold each Contributor harmless for any liability ++ incurred by, or claims asserted against, such Contributor by reason ++ of your accepting any such warranty or additional liability. ++ ++END OF TERMS AND CONDITIONS ++ ++APPENDIX: How to apply the Apache License to your work. ++ ++ To apply the Apache License to your work, attach the following ++ boilerplate notice, with the fields enclosed by brackets "[]" ++ replaced with your own identifying information. (Don't include ++ the brackets!) 
The text should be enclosed in the appropriate ++ comment syntax for the file format. We also recommend that a ++ file or class name and description of purpose be included on the ++ same "printed page" as the copyright notice for easier ++ identification within third-party archives. ++ ++Copyright [yyyy] [name of copyright owner] ++ ++Licensed under the Apache License, Version 2.0 (the "License"); ++you may not use this file except in compliance with the License. ++You may obtain a copy of the License at ++ ++ http://www.apache.org/licenses/LICENSE-2.0 ++ ++Unless required by applicable law or agreed to in writing, software ++distributed under the License is distributed on an "AS IS" BASIS, ++WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++See the License for the specific language governing permissions and ++limitations under the License. diff --cc vendor/custom_derive-0.1.7/README.md index 000000000,000000000..203a2be16 new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/README.md @@@ -1,0 -1,0 +1,24 @@@ ++# `custom_derive!` ++ ++**Note**: This crate has been superseded by `macro-attr`. ++ ++This crate provides a macro that enables the use of custom `derive` attributes. ++ ++**Links** ++ ++* [Latest Release](https://crates.io/crates/custom_derive/) ++* [Latest Docs](https://docs.rs/crate/custom_derive/) ++* [Repository](https://github.com/DanielKeep/rust-custom-derive/tree/custom_derive-master/) ++ ++## License ++ ++Licensed under either of ++ ++* MIT license (see [LICENSE](LICENSE) or ) ++* Apache License, Version 2.0 (see [LICENSE](LICENSE) or ) ++ ++at your option. ++ ++### Contribution ++ ++Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you shall be dual licensed as above, without any additional terms or conditions. 
diff --cc vendor/custom_derive-0.1.7/src/lib.rs index 000000000,000000000..860945cff new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/src/lib.rs @@@ -1,0 -1,0 +1,462 @@@ ++/* ++Copyright ⓒ 2016 rust-custom-derive contributors. ++ ++Licensed under the MIT license (see LICENSE or ) or the Apache License, Version 2.0 (see LICENSE of ++), at your option. All ++files in the project carrying such notice may not be copied, modified, ++or distributed except according to those terms. ++*/ ++/*! ++**Note**: This crate has been superseded by `macro-attr`. ++ ++This crate provides a macro that enables the use of custom `derive` attributes. ++ ++To use it, make sure you link to the crate like so: ++ ++```rust ++#[macro_use] extern crate custom_derive; ++# macro_rules! Dummy { (() struct $name:ident;) => {}; } ++# custom_derive! { #[derive(Clone, Dummy)] struct Foo; } ++# fn main() { let _ = Foo; } ++``` ++ ++> **Note**: the `custom_derive!` macro itself is not documented, as the automatic documentation for it would be uselessly huge and incomprehensible. ++ ++ ++ ++ ++# Usage ++ ++The macro should be used to wrap an entire *single* `enum` or `struct` declaration, including its attributes (both `derive` and others). All derivation attributes which the macro does *not* recognise will be assumed to be custom, and treated accordingly. ++ ++`custom_derive!` assumes that custom derivations are implemented as macros (of the same name). For example, here is a simple derivation macro: ++ ++```rust ++#[macro_use] extern crate custom_derive; ++ ++trait TypeName { ++ fn type_name() -> &'static str; ++} ++ ++trait ReprType { ++ type Repr; ++} ++ ++macro_rules! TypeName { ++ (() $(pub)* enum $name:ident $($tail:tt)*) => { TypeName! { @impl $name } }; ++ (() $(pub)* struct $name:ident $($tail:tt)*) => { TypeName! 
{ @impl $name } }; ++ ++ (@impl $name:ident) => { ++ impl TypeName for $name { ++ fn type_name() -> &'static str { stringify!($name) } ++ } ++ }; ++} ++ ++macro_rules! TryFrom { ++ (($repr:ty) $(pub)* enum $name:ident $($tail:tt)*) => { ++ impl ReprType for $name { ++ type Repr = $repr; ++ } ++ }; ++} ++ ++custom_derive! { ++ #[allow(dead_code)] ++ #[repr(u8)] ++ #[derive(Clone, Copy, Debug, TryFrom(u8), TypeName)] ++ enum Foo { A, B } ++} ++ ++fn main() { ++ let foo = Foo::B; ++ let v = foo as ::Repr; ++ let msg = format!("{}: {:?} ({:?})", Foo::type_name(), foo, v); ++ assert_eq!(msg, "Foo: B (1)"); ++} ++``` ++ ++First, note that `custom_derive!` passes any arguments on the derivation attribute to the macro. In the case of attributes *without* any arguments, `()` is passed instead. ++ ++Secondly, the macro is passed the entire item, *sans* attributes. It is the derivation macro's job to parse the item correctly. ++ ++Third, each derivation macro is expected to result in zero or more items, not including the item itself. As a result, it is *not* possible to mutate the item in any way, or attach additional attributes to it. ++ ++Finally, `@impl` is merely a trick to pack multiple, different functions into a single macro. The sequence has no special meaning; it is simply *distinct* from the usual invocation syntax. ++*/ ++#![cfg_attr(not(feature = "std"), no_std)] ++ ++#[doc(hidden)] ++#[macro_export] ++macro_rules! custom_derive { ++ /* ++ ++ > **Convention**: a capture named `$fixed` is used for any part of a recursive rule that is needed in the terminal case, but is not actually being *used* for the recursive part. This avoids having to constantly repeat the full capture pattern (and makes changing it easier). ++ ++ # Primary Invocation Forms ++ ++ These need to catch any valid form of struct or enum. ++ ++ */ ++ ( ++ $(#[$($attrs:tt)*])* ++ enum $($it:tt)* ++ ) => { ++ custom_derive! 
{ ++ @split_attrs ++ ($(#[$($attrs)*],)*), (), (), ++ (enum $($it)*) ++ } ++ }; ++ ++ ( ++ $(#[$($attrs:tt)*])* ++ pub $($it:tt)* ++ ) => { ++ custom_derive! { ++ @split_attrs ++ ($(#[$($attrs)*],)*), (), (), ++ (pub $($it)*) ++ } ++ }; ++ ++ ( ++ $(#[$($attrs:tt)*])* ++ struct $($it:tt)* ++ ) => { ++ custom_derive! { ++ @split_attrs ++ ($(#[$($attrs)*],)*), (), (), ++ (struct $($it)*) ++ } ++ }; ++ ++ /* ++ ++ # `@split_attrs` ++ ++ This is responsible for dividing all attributes on an item into two groups: ++ ++ - `#[derive(...)]` ++ - Everything else. ++ ++ As part of this, it also explodes `#[derive(A, B(..), C, ...)]` into `A, B(..), C, ...`. This is to simplify the next stage. ++ ++ */ ++ ( ++ @split_attrs ++ (), ++ $non_derives:tt, ++ $derives:tt, ++ $it:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ { $non_derives, $it }, ++ $derives, ++ (), ++ () ++ } ++ }; ++ ++ ( ++ @split_attrs ++ (#[derive($($new_drv:ident $(($($new_drv_args:tt)*))*),* $(,)*)], $(#[$($attrs:tt)*],)*), ++ $non_derives:tt, ++ ($($derives:ident,)*), ++ $it:tt ++ ) => { ++ custom_derive! { ++ @split_attrs ++ ($(#[$($attrs)*],)*), ++ $non_derives, ++ ($($derives,)* $($new_drv $(($($new_drv_args)*))*,)*), ++ $it ++ } ++ }; ++ ++ ( ++ @split_attrs ++ (#[$new_attr:meta], $(#[$($attrs:tt)*],)*), ++ ($($non_derives:tt)*), ++ $derives:tt, ++ $it:tt ++ ) => { ++ custom_derive! { ++ @split_attrs ++ ($(#[$($attrs)*],)*), ++ ($($non_derives)* #[$new_attr],), ++ $derives, ++ $it ++ } ++ }; ++ ++ /* ++ ++ # `@split_derive_attrs` ++ ++ This is responsible for taking the list of derivation attributes and splitting them into "built-in" and "custom" groups. ++ ++ The list of built-in derives currently supported is: Clone, Hash, RustcEncodable, RustcDecodable, PartialEq, Eq, PartialOrd, Ord, Debug, Default, Send, Sync, Copy. ++ ++ Anything not on that list is considered "custom". ++ ++ And yes, as far as I can see, we *have* to have a separate rule for each of those. 
What I wouldn't give for an alternation pattern... ++ */ ++ ++ (@split_derive_attrs ++ { ($(#[$($non_derives:tt)*],)*), ($($it:tt)*) }, ++ (), (), ($($user_drvs:tt)*) ++ ) => { ++ custom_derive! { ++ @as_item ++ $(#[$($non_derives)*])* ++ $($it)* ++ } ++ ++ custom_derive! { ++ @expand_user_drvs ++ ($($user_drvs)*), ($($it)*) ++ } ++ }; ++ ++ (@split_derive_attrs ++ { ($(#[$($non_derives:tt)*],)*), ($($it:tt)*) }, ++ (), ($($bi_drvs:ident,)+), ($($user_drvs:tt)*) ++ ) => { ++ custom_derive! { ++ @as_item ++ #[derive($($bi_drvs,)+)] ++ $(#[$($non_derives)*])* ++ $($it)* ++ } ++ ++ custom_derive! { ++ @expand_user_drvs ++ ($($user_drvs)*), ($($it)*) ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (Hash, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* Hash,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (Clone, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* Clone,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (RustcEncodable, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* RustcEncodable,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (RustcDecodable, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* RustcDecodable,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (PartialEq, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* PartialEq,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (Eq, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! 
{ ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* Eq,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (PartialOrd, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* PartialOrd,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (Ord, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* Ord,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (Debug, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* Debug,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (Default, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* Default,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (Send ,$($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* Send,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (Sync, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* Sync,), $user_drvs ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ (Copy, $($tail:tt)*), ($($bi_drvs:ident,)*), $user_drvs:tt ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ++ ($($tail)*), ($($bi_drvs,)* Copy,), $user_drvs ++ } ++ }; ++ ++ /* ++ ++ ## Custom Derivations ++ ++ Now we can handle the custom derivations. There are two forms we care about: those *with* an argument, and those *without*. 
++ ++ The *reason* we care is that, in order to simplify the derivation macros, we want to detect the argument-less case and generate an empty pair of parens. ++ ++ */ ++ (@split_derive_attrs ++ $fixed:tt, ++ ($new_user:ident, $($tail:tt)*), $bi_drvs:tt, ($($user_drvs:tt)*) ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ($($tail)*), $bi_drvs, ($($user_drvs)* $new_user(),) ++ } ++ }; ++ ++ (@split_derive_attrs ++ $fixed:tt, ++ ($new_user:ident ($($new_user_args:tt)*), $($tail:tt)*), $bi_drvs:tt, ($($user_drvs:tt)*) ++ ) => { ++ custom_derive! { ++ @split_derive_attrs ++ $fixed, ($($tail)*), $bi_drvs, ($($user_drvs)* $new_user($($new_user_args)*),) ++ } ++ }; ++ ++ /* ++ ++ # `@expand_user_drvs` ++ ++ Finally, we have a recursive rule for expanding user derivations. This is basically just using the derivation name as a macro identifier. ++ ++ This *has* to be recursive because we need to expand two independent repetition sequences simultaneously, and this causes `macro_rules!` to throw a wobbly. Don't want that. So, recursive it is. ++ ++ */ ++ (@expand_user_drvs ++ (), ($($it:tt)*) ++ ) => {}; ++ ++ (@expand_user_drvs ++ ($user_drv:ident $arg:tt, $($tail:tt)*), ($($it:tt)*) ++ ) => { ++ $user_drv! { $arg $($it)* } ++ custom_derive! { ++ @expand_user_drvs ++ ($($tail)*), ($($it)*) ++ } ++ }; ++ ++ /* ++ ++ # Miscellaneous Rules ++ ++ */ ++ (@as_item $($i:item)*) => {$($i)*}; ++} diff --cc vendor/custom_derive-0.1.7/tests/empty_bi_derives.rs index 000000000,000000000..6af19ab32 new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/tests/empty_bi_derives.rs @@@ -1,0 -1,0 +1,24 @@@ ++/* ++Copyright ⓒ 2015 rust-custom-derive contributors. ++ ++Licensed under the MIT license (see LICENSE or ) or the Apache License, Version 2.0 (see LICENSE of ++), at your option. All ++files in the project carrying such notice may not be copied, modified, ++or distributed except according to those terms. 
++*/ ++#[macro_use] extern crate custom_derive; ++ ++macro_rules! Dummy { ++ ($($tts:tt)*) => {}; ++} ++ ++custom_derive! { ++ #[derive(Dummy)] ++ enum Foo { Bar } ++} ++ ++#[test] ++fn test_empty_bi_derives() { ++ let _ = Foo::Bar; ++} diff --cc vendor/custom_derive-0.1.7/tests/enum_iterator.rs index 000000000,000000000..65cef29c0 new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/tests/enum_iterator.rs @@@ -1,0 -1,0 +1,73 @@@ ++/* ++Copyright ⓒ 2015 rust-custom-derive contributors. ++ ++Licensed under the MIT license (see LICENSE or ) or the Apache License, Version 2.0 (see LICENSE of ++), at your option. All ++files in the project carrying such notice may not be copied, modified, ++or distributed except according to those terms. ++*/ ++#[macro_use] extern crate custom_derive; ++ ++macro_rules! EnumIterator { ++ (() $(pub)* enum $name:ident { $($body:tt)* }) => { ++ EnumIterator! { ++ @collect_variants ($name), ++ ($($body)*,) -> () ++ } ++ }; ++ ++ ( ++ @collect_variants ($name:ident), ++ ($(,)*) -> ($($var_names:ident,)*) ++ ) => { ++ type NameIter = ::std::vec::IntoIter<&'static str>; ++ type VariantIter = ::std::vec::IntoIter<$name>; ++ ++ impl $name { ++ #[allow(dead_code)] ++ pub fn iter_variants() -> VariantIter { ++ vec![$($name::$var_names),*].into_iter() ++ } ++ ++ #[allow(dead_code)] ++ pub fn iter_variant_names() -> NameIter { ++ vec![$(stringify!($var_names)),*].into_iter() ++ } ++ } ++ }; ++ ++ ( ++ @collect_variants $fixed:tt, ++ ($var:ident $(= $_val:expr)*, $($tail:tt)*) -> ($($var_names:tt)*) ++ ) => { ++ EnumIterator! { ++ @collect_variants $fixed, ++ ($($tail)*) -> ($($var_names)* $var,) ++ } ++ }; ++ ++ ( ++ @collect_variants ($name:ident), ++ ($var:ident $_struct:tt, $($tail:tt)*) -> ($($var_names:tt)*) ++ ) => { ++ const _error: () = concat!( ++ "cannot derive EnumIterator for ", ++ stringify!($name), ++ ", due to non-unitary variant ", ++ stringify!($var), ++ "." ++ ); ++ }; ++} ++ ++custom_derive! 
{ ++ #[derive(Debug, PartialEq, EnumIterator)] ++ enum Get { Up, Down, AllAround } ++} ++ ++#[test] ++fn test_enum_iterator() { ++ let vs: Vec<_> = Get::iter_variant_names().zip(Get::iter_variants()).collect(); ++ assert_eq!(&*vs, &[("Up", Get::Up), ("Down", Get::Down), ("AllAround", Get::AllAround)]); ++} diff --cc vendor/custom_derive-0.1.7/tests/enum_try_from.rs index 000000000,000000000..41ad00455 new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/tests/enum_try_from.rs @@@ -1,0 -1,0 +1,77 @@@ ++/* ++Copyright ⓒ 2015 rust-custom-derive contributors. ++ ++Licensed under the MIT license (see LICENSE or ) or the Apache License, Version 2.0 (see LICENSE of ++), at your option. All ++files in the project carrying such notice may not be copied, modified, ++or distributed except according to those terms. ++*/ ++#[macro_use] extern crate custom_derive; ++ ++trait TryFrom: Sized { ++ type Err; ++ fn try_from(src: Src) -> Result; ++} ++ ++macro_rules! TryFrom { ++ (($prim:ty) $(pub)* enum $name:ident { $($body:tt)* }) => { ++ TryFrom! { ++ @collect_variants ($name, $prim), ++ ($($body)*,) -> () ++ } ++ }; ++ ++ ( ++ @collect_variants ($name:ident, $prim:ty), ++ ($(,)*) -> ($($var_names:ident,)*) ++ ) => { ++ impl TryFrom<$prim> for $name { ++ type Err = $prim; ++ fn try_from(src: $prim) -> Result<$name, $prim> { ++ $( ++ if src == $name::$var_names as $prim { ++ return Ok($name::$var_names); ++ } ++ )* ++ Err(src) ++ } ++ } ++ }; ++ ++ ( ++ @collect_variants $fixed:tt, ++ ($var:ident $(= $_val:expr)*, $($tail:tt)*) -> ($($var_names:tt)*) ++ ) => { ++ TryFrom! { ++ @collect_variants $fixed, ++ ($($tail)*) -> ($($var_names)* $var,) ++ } ++ }; ++ ++ ( ++ @collect_variants ($name:ident), ++ ($var:ident $_struct:tt, $($tail:tt)*) -> ($($var_names:tt)*) ++ ) => { ++ const _error: () = concat!( ++ "cannot derive TryFrom for ", ++ stringify!($name), ++ ", due to non-unitary variant ", ++ stringify!($var), ++ "." ++ ); ++ }; ++} ++ ++custom_derive! 
{ ++ #[derive(Debug, PartialEq, TryFrom(u8))] ++ enum Get { Up, Down, AllAround } ++} ++ ++#[test] ++fn test_try_from() { ++ assert_eq!(Get::try_from(0u8), Ok(Get::Up)); ++ assert_eq!(Get::try_from(1u8), Ok(Get::Down)); ++ assert_eq!(Get::try_from(2u8), Ok(Get::AllAround)); ++ assert_eq!(Get::try_from(3u8), Err(3u8)); ++} diff --cc vendor/custom_derive-0.1.7/tests/passthru_derive.rs index 000000000,000000000..3ff6242c1 new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/tests/passthru_derive.rs @@@ -1,0 -1,0 +1,19 @@@ ++/* ++Copyright ⓒ 2015 rust-custom-derive contributors. ++ ++Licensed under the MIT license (see LICENSE or ) or the Apache License, Version 2.0 (see LICENSE of ++), at your option. All ++files in the project carrying such notice may not be copied, modified, ++or distributed except according to those terms. ++*/ ++#[macro_use] extern crate custom_derive; ++extern crate rustc_serialize; ++ ++custom_derive! { ++ #[derive(Clone, Hash, RustcEncodable, RustcDecodable, PartialEq, Eq, PartialOrd, Ord, Debug, Default, Copy)] ++ pub struct Dummy(u32); ++} ++ ++#[test] ++fn test_passthru_derive() {} diff --cc vendor/custom_derive-0.1.7/tests/stable_encodable.rs index 000000000,000000000..fddefca0a new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/tests/stable_encodable.rs @@@ -1,0 -1,0 +1,378 @@@ ++/* ++Copyright ⓒ 2015 rust-custom-derive contributors. ++ ++Licensed under the MIT license (see LICENSE or ) or the Apache License, Version 2.0 (see LICENSE of ++), at your option. All ++files in the project carrying such notice may not be copied, modified, ++or distributed except according to those terms. ++*/ ++#[macro_use] extern crate custom_derive; ++extern crate rustc_serialize; ++ ++macro_rules! StableEncodable { ++ ( ++ () $(pub)* enum $name:ident < $($tail:tt)* ++ ) => { ++ StableEncodable! 
{ ++ @extract_gen_args (enum $name), ++ ($($tail)*) ++ -> bounds(), ty_clss(where) ++ } ++ }; ++ ++ ( ++ () $(pub)* enum $name:ident { $($body:tt)* } ++ ) => { ++ StableEncodable! { ++ @impl enum $name, ++ bounds(), ++ ty_clss(), ++ { $($body)* } ++ } ++ }; ++ ++ ( ++ () $(pub)* struct $name:ident { $($body:tt)* } ++ ) => { ++ StableEncodable! { ++ @impl struct $name, ++ bounds(), ++ ty_clss(), ++ { $($body)* } ++ } ++ }; ++ ++ ( ++ () $(pub)* struct $name:ident < $($tail:tt)* ++ ) => { ++ StableEncodable! { ++ @extract_gen_args (struct $name), ++ ($($tail)*) ++ -> bounds(), ty_clss(where) ++ } ++ }; ++ ++ ( ++ @impl enum $name:ident, ++ bounds($($bounds:tt)*), ++ ty_clss($($ty_clss:tt)*), ++ { $($body:tt)* } ++ ) => { ++ StableEncodable! { ++ @parse_variants (enum $name, bounds($($bounds)*), ty_clss($($ty_clss)*)), ++ 0usize, ($($body)*,) -> () ++ } ++ }; ++ ++ ( ++ @impl struct $name:ident, ++ bounds($($bounds:tt)*), ++ ty_clss($($ty_clss:tt)*), ++ { $($fnames:ident: $_ftys:ty),* $(,)* } ++ ) => { ++ StableEncodable! { ++ @as_item ++ impl<$($bounds)*> rustc_serialize::Encodable for $name<$($bounds)*> ++ $($ty_clss)* { ++ fn encode( ++ &self, ++ s: &mut StableEncodableEncoder ++ ) -> Result<(), StableEncodableEncoder::Error> ++ where StableEncodableEncoder: rustc_serialize::Encoder { ++ const NUM_FIELDS: usize = StableEncodable!(@count_tts $($fnames)*); ++ try!(s.emit_struct(stringify!($name), NUM_FIELDS, |s| { ++ // Poor man's enumerate!($($fnames)): ++ let mut idx = 0; ++ $( ++ try!(s.emit_struct_field(stringify!($fnames), idx, |s| { ++ self.$fnames.encode(s) ++ })); ++ idx += 1; ++ )* ++ let _ = idx; ++ Ok(()) ++ })); ++ Ok(()) ++ } ++ } ++ } ++ }; ++ ++ (@as_item $i:item) => {$i}; ++ ++ ( ++ @extract_gen_args ($kind:ident $name:ident), ++ (> { $($tail:tt)* }) ++ -> bounds($($bounds:tt)*), ty_clss($($ty_clss:tt)*) ++ ) => { ++ StableEncodable! 
{ ++ @impl $kind $name, ++ bounds($($bounds)*), ++ ty_clss($($ty_clss)*), ++ { $($tail)* } ++ } ++ }; ++ ++ ( ++ @extract_gen_args $fixed:tt, ++ ($ty_name:ident: $($tail)*) ++ -> bounds($($bounds:tt)*), ty_clss($($ty_clss:tt)*) ++ ) => { ++ StableEncodable! { ++ @skip_inline_bound $fixed, ++ ($($tail)*) ++ -> bounds($($bounds)* $ty_name:), ++ ty_clss($($ty_clss)* $ty_name: ::rustc_serialize::Encodable,) ++ } ++ }; ++ ++ ( ++ @extract_gen_args $fixed:tt, ++ ($ty_name:ident $($tail:tt)*) ++ -> bounds($($bounds:tt)*), ty_clss($($ty_clss:tt)*) ++ ) => { ++ StableEncodable! { ++ @extract_gen_args $fixed, ++ ($($tail)*) ++ -> bounds($($bounds)* $ty_name), ++ ty_clss($($ty_clss)* $ty_name: ::rustc_serialize::Encodable,) ++ } ++ }; ++ ++ ( ++ @extract_gen_args $fixed:tt, ++ (, $($tail:tt)*) ++ -> bounds($($bounds:tt)*), ty_clss($($ty_clss:tt)*) ++ ) => { ++ StableEncodable! { ++ @extract_gen_args $fixed, ++ ($($tail)*) ++ -> bounds($($bounds)* ,), ty_clss($($ty_clss)*) ++ } ++ }; ++ ++ ( ++ @extract_gen_args $fixed:tt, ++ ($lt:tt $($tail:tt)*) ++ -> bounds($($bounds:tt)*), ty_clss($($ty_clss:tt)*) ++ ) => { ++ StableEncodable! { ++ @extract_gen_args $fixed, ++ ($($tail)*) ++ -> bounds($($bounds)* $lt), ty_clss($($ty_clss)*) ++ } ++ }; ++ ++ ( ++ @skip_inline_bound $fixed:tt, ++ (, $($tail:tt)*) ++ -> bounds($($bounds:tt)*), ty_clss($($ty_clss:tt)*) ++ ) => { ++ StableEncodable! { ++ @extract_gen_args $fixed, ++ ($($tail)*) ++ -> bounds($($bounds)* ,), ty_clss($($ty_clss)*) ++ } ++ }; ++ ++ ( ++ @skip_inline_bound $fixed:tt, ++ (> { $($tail:tt)* }) ++ -> bounds($($bounds:tt)*), ty_clss($($ty_clss:tt)*) ++ ) => { ++ StableEncodable! { ++ @impl $fixed, ++ bounds($($bounds)*), ++ ty_clss($($ty_clss)*), ++ { $($tail)* } ++ } ++ }; ++ ++ ( ++ @parse_variants (enum $name:ident, bounds($($bounds:tt)*), ty_clss($($ty_clss:tt)*)), ++ $_id:expr, ($(,)*) -> ($($variants:tt)*) ++ ) => { ++ StableEncodable! 
{ ++ @as_item ++ impl<$($bounds)*> rustc_serialize::Encodable for $name<$($bounds)*> ++ $($ty_clss)* { ++ fn encode( ++ &self, ++ s: &mut StableEncodableEncoder) ++ -> Result<(), StableEncodableEncoder::Error> ++ where StableEncodableEncoder: rustc_serialize::Encoder { ++ s.emit_enum(stringify!($name), |s| { ++ $( ++ StableEncodable!(@encode_variant $name, $variants, self, s); ++ )* ++ unreachable!(); ++ }) ++ } ++ } ++ } ++ }; ++ ++ ( ++ @parse_variants $fixed:tt, ++ $id:expr, ($var_name:ident, $($tail:tt)*) -> ($($variants:tt)*) ++ ) => { ++ StableEncodable! { ++ @parse_variants $fixed, ++ ($id + 1usize), ($($tail)*) -> ($($variants)* ($var_name, $id)) ++ } ++ }; ++ ++ ( ++ @parse_variants $fixed:tt, ++ $id:expr, ($var_name:ident($(,)*), $($tail:tt)*) -> ($($variants:tt)*) ++ ) => { ++ StableEncodable! { ++ @parse_variants $fixed, ++ ($id + 1usize), ($($tail)*) -> ($($variants)* ++ ($var_name, $id)) ++ } ++ }; ++ ++ ( ++ @parse_variants $fixed:tt, ++ $id:expr, ($var_name:ident($_vta:ty), $($tail:tt)*) -> ($($variants:tt)*) ++ ) => { ++ StableEncodable! { ++ @parse_variants $fixed, ++ ($id + 1usize), ($($tail)*) -> ($($variants)* ++ ($var_name, $id, (a))) ++ } ++ }; ++ ++ ( ++ @parse_variants $fixed:tt, ++ $id:expr, ($var_name:ident($_vta:ty, $_vtb:ty), $($tail:tt)*) -> ($($variants:tt)*) ++ ) => { ++ StableEncodable! { ++ @parse_variants $fixed, ++ ($id + 1usize), ($($tail)*) -> ($($variants)* ++ ($var_name, $id, (a, b))) ++ } ++ }; ++ ++ ( ++ @parse_variants $fixed:tt, ++ $id:expr, ($var_name:ident($_vta:ty, $_vtb:ty, $_vtc:ty), $($tail:tt)*) -> ($($variants:tt)*) ++ ) => { ++ StableEncodable! { ++ @parse_variants $fixed, ++ ($id + 1usize), ($($tail)*) -> ($($variants)* ++ ($var_name, $id, (a, b, c))) ++ } ++ }; ++ ++ ( ++ @parse_variants $fixed:tt, ++ $id:expr, ($var_name:ident { $($vfn:ident: $_vft:ty),* $(,)* }, $($tail:tt)*) -> ($($variants:tt)*) ++ ) => { ++ StableEncodable! 
{ ++ @parse_variants $fixed, ++ ($id + 1usize), ($($tail)*) -> ($($variants)* ++ ($var_name, $id, {$($vfn),*})) ++ } ++ }; ++ ++ ( ++ @encode_variant $name:ident, ++ ($var_name:ident, $var_id:expr), ++ $self_:expr, $s:ident ++ ) => { ++ { ++ if let $name::$var_name = *$self_ { ++ return $s.emit_enum_variant(stringify!($var_name), $var_id, 0, |_| Ok(())); ++ } ++ } ++ }; ++ ++ ( ++ @encode_variant $name:ident, ++ ($var_name:ident, $var_id:expr, ($($tup_elems:ident),*)), ++ $self_:expr, $s:ident ++ ) => { ++ { ++ if let $name::$var_name($(ref $tup_elems),*) = *$self_ { ++ return $s.emit_enum_variant( ++ stringify!($var_name), ++ $var_id, ++ StableEncodable!(@count_tts $($tup_elems)*), ++ |s| { ++ let mut idx = 0; ++ $( ++ try!(s.emit_enum_variant_arg(idx, |s| $tup_elems.encode(s))); ++ idx += 1; ++ )* ++ let _ = idx; ++ Ok(()) ++ } ++ ); ++ } ++ } ++ }; ++ ++ ( ++ @encode_variant $name:ident, ++ ($var_name:ident, $var_id:expr, {$($str_fields:ident),*}), ++ $self_:expr, $s:ident ++ ) => { ++ { ++ if let $name::$var_name { $(ref $str_fields),* } = *$self_ { ++ return $s.emit_enum_struct_variant( ++ stringify!($var_name), ++ $var_id, ++ StableEncodable!(@count_tts $($str_fields)*), ++ |s| { ++ let mut idx = 0; ++ $( ++ try!(s.emit_enum_struct_variant_field( ++ stringify!($str_fields), ++ idx, ++ |s| $str_fields.encode(s) ++ )); ++ idx += 1; ++ )* ++ let _ = idx; ++ Ok(()) ++ } ++ ); ++ } ++ } ++ }; ++ ++ (@count_tts) => {0usize}; ++ (@count_tts $_tt:tt $($tail:tt)*) => {1usize + StableEncodable!(@count_tts $($tail)*)}; ++} ++ ++custom_derive! { ++ #[derive(Debug, StableEncodable)] ++ struct LazyEg { a: A, b: i32, c: (u8, u8, u8) } ++} ++ ++custom_derive! { ++ #[derive(Clone, StableEncodable)] ++ enum Wonky { Flim, Flam, Flom(i32), Bees { say: S } } ++} ++ ++#[test] ++fn test_stable_encodable() { ++ macro_rules! 
json { ++ ($e:expr) => (rustc_serialize::json::encode(&$e).unwrap()); ++ } ++ ++ let lazy_eg = LazyEg { ++ a: String::from("Oh hai!"), ++ b: 42, ++ c: (1, 3, 0), ++ }; ++ assert_eq!(&*json!(lazy_eg), r#"{"a":"Oh hai!","b":42,"c":[1,3,0]}"#); ++ ++ assert_eq!(&*json!(Wonky::Flim::<()>), r#""Flim""#); ++ assert_eq!(&*json!(Wonky::Flam::<()>), r#""Flam""#); ++ assert_eq!(&*json!(Wonky::Flom::<()>(42)), r#"{"variant":"Flom","fields":[42]}"#); ++ assert_eq!(&*json!(Wonky::Bees{say:"aaaaah!"}), r#"{"variant":"Bees","fields":["aaaaah!"]}"#); ++} diff --cc vendor/custom_derive-0.1.7/tests/trailing_comma.rs index 000000000,000000000..ba5d09727 new file mode 100644 --- /dev/null +++ b/vendor/custom_derive-0.1.7/tests/trailing_comma.rs @@@ -1,0 -1,0 +1,24 @@@ ++/* ++Copyright ⓒ 2015 rust-custom-derive contributors. ++ ++Licensed under the MIT license (see LICENSE or ) or the Apache License, Version 2.0 (see LICENSE of ++), at your option. All ++files in the project carrying such notice may not be copied, modified, ++or distributed except according to those terms. ++*/ ++#[macro_use] extern crate custom_derive; ++ ++macro_rules! Dummy { ++ ($($tts:tt)*) => {}; ++} ++ ++custom_derive! 
{ ++ #[derive(Dummy,)] ++ enum Foo { Bar } ++} ++ ++#[test] ++fn test_trailing_comma() { ++ let _ = Foo::Bar; ++} diff --cc vendor/error-chain-0.11.0-rc.2/.cargo-checksum.json index 000000000,000000000..206082771 new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"38d3a55d9a7a456748f2a3912c0941a5d9a68006eb15b3c3c9836b8420dc102d"} diff --cc vendor/error-chain-0.11.0-rc.2/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/.cargo-ok diff --cc vendor/error-chain-0.11.0-rc.2/.travis.yml index 000000000,000000000..31d929703 new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/.travis.yml @@@ -1,0 -1,0 +1,45 @@@ ++language: rust ++rust: ++- stable ++- beta ++- nightly ++# Oldest supported version for all features. ++# Use of https://github.com/rust-lang/rfcs/pull/16 ++- 1.13.0 ++# Oldest supported version as dependency, with no features, tests, or examples. 
++- 1.10.0 ++ ++sudo: false ++cache: cargo ++addons: ++ apt: ++ packages: ++ - libcurl4-openssl-dev ++ - libelf-dev ++ - libdw-dev ++ ++before_script: ++- | ++ pip install 'travis-cargo<0.2' --user && ++ export PATH=$HOME/.local/bin:$PATH ++ ++script: ++- travis-cargo build -- $FEATURES ++- travis-cargo --skip 1.10.0 test -- $FEATURES ++ ++after_success: ++- travis-cargo --only stable doc ++- travis-cargo --only stable doc-upload ++ ++env: ++ global: ++ - secure: ncxJbvJM1vCZfcEftjsFKJMxxhKLgWKaR8Go9AMo0VB5fB2XVW/6NYO5bQEEYpOf1Nc/+2FbI2+Dkz0S/mJpUcNSfBgablCHgwU2sHse7KsoaqfHj2mf1E3exjzSHoP96hPGicC5zAjSXFjCgJPOUSGqqRaJ7z5AsJLhJT6LuK7QpvwPBZzklUN8T+n1sVmws8TNmRIbaniq/q6wYHANHcy6Dl59dx4sKwniUGiZdUhCiddVpoxbECSxc0A8mN2pk7/aW+WGxK3goBs5ZF7+JXF318F62pDcXQmR5CX6WdpenIcJ25g1Vg1WhQ4Ifpe17CN0bfxV8ShuzrQUThCDMffZCo9XySBtODdEowwK1UIpjnFLfIxjOs45Cd8o3tM2j0CfvtnjOz6BCdUU0qiwNPPNx0wFkx3ZiOfSh+FhBhvyPM12HN2tdN0esgVBItFmEci+sSIIXqjVL6DNiu5zTjbu0bs6COwlUWdmL6vmsZtq5tl7Cno9+C3szxRVAkShGydd04l9NYjqNEzTa1EPG50OsnVRKGdRiFzSxhc3BWExNKvcQ4v867t6/PpPkW6s4oXmYI3+De+8O7ExWc6a4alcrDXKlMs5fCb5Pcd4Ju9kowcjkoJo5yf2wW3Ox5R8SJpaEEpvyhx5O/qtIxjhHNzeo8Wsr/6gdNDv20r91TI= ++ - TRAVIS_CARGO_NIGHTLY_FEATURE="" ++ matrix: ++ - FEATURES=--features=backtrace ++ - FEATURES=--no-default-features ++ ++matrix: ++ exclude: ++ - env: FEATURES=--features=backtrace ++ rust: 1.10.0 diff --cc vendor/error-chain-0.11.0-rc.2/CHANGELOG.md index 000000000,000000000..5e1422941 new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/CHANGELOG.md @@@ -1,0 -1,0 +1,109 @@@ ++# 0.11.0-rc.2 ++ ++- [Make `ErrorChainIter`'s field private](https://github.com/brson/error-chain/issues/178) ++- [Rename `ErrorChainIter` to `Iter`](https://github.com/brson/error-chain/issues/168) ++- [Implement `Debug` for `ErrorChainIter`](https://github.com/brson/error-chain/issues/169) ++- [Rename `ChainedError::display` to `display_chain`](https://github.com/brson/error-chain/issues/180) ++- [Add a new method for `Error`: 
`chain_err`.](https://github.com/brson/error-chain/pull/141) ++- [Allow `chain_err` to be used on `Option`](https://github.com/brson/error-chain/pull/156) ++- [Add support for creating an error chain on boxed trait errors (`Box`)](https://github.com/brson/error-chain/pull/156) ++- [Remove lint for unused doc comment.](https://github.com/brson/error-chain/pull/199) ++ ++# 0.10.0 ++ ++- [Add a new constructor for `Error`: `with_chain`.](https://github.com/brson/error-chain/pull/126) ++- [Add the `ensure!` macro.](https://github.com/brson/error-chain/pull/135) ++ ++# 0.9.0 ++ ++- Revert [Add a `Sync` bound to errors](https://github.com/brson/error-chain/pull/110) ++ ++# 0.8.1 ++ ++- Add crates.io categorie. ++ ++# 0.8.0 ++ ++- [Add a `Sync` bound to errors](https://github.com/brson/error-chain/pull/110) ++- [Add `ChainedError::display` to format error chains](https://github.com/brson/error-chain/pull/113) ++ ++# 0.7.2 ++ ++- Add `quick_main!` (#88). ++- `allow(unused)` for the `Result` wrapper. ++- Minimum rust version supported is now 1.10 on some conditions (#103). ++ ++# 0.7.1 ++ ++- [Add the `bail!` macro](https://github.com/brson/error-chain/pull/76) ++ ++# 0.7.0 ++ ++- [Rollback several design changes to fix regressions](https://github.com/brson/error-chain/pull/75) ++- New `Variant(Error) #[attrs]` for `links` and `foreign_links`. ++- Hide implementation details from the doc. ++- Always generate `Error::backtrace`. ++ ++# 0.6.2 ++ ++- Allow dead code. ++ ++# 0.6.1 ++ ++- Fix wrong trait constraint in ResultExt implementation (#66). ++ ++# 0.6.0 ++ ++- Conditional compilation for error variants. ++- Backtrace generation is now a feature. ++- More standard trait implementations for extra convenience. ++- Remove ChainErr. ++- Remove need to specify `ErrorKind` in `links {}`. ++- Add ResultExt trait. ++- Error.1 is a struct instead of a tuple. ++- Error is now a struct. ++- The declarations order is more flexible. 
++- Way better error reporting when there is a syntax error in the macro call. ++- `Result` generation can be disabled. ++- At most one declaration of each type can be present. ++ ++# 0.5.0 ++ ++- [Only generate backtraces with RUST_BACKTRACE set](https://github.com/brson/error-chain/pull/27) ++- [Fixup matching, disallow repeating "types" section](https://github.com/brson/error-chain/pull/26) ++- [Fix tests on stable/beta](https://github.com/brson/error-chain/pull/28) ++- [Only deploy docs when tagged](https://github.com/brson/error-chain/pull/30) ++ ++Contributors: benaryorg, Brian Anderson, Georg Brandl ++ ++# 0.4.2 ++ ++- [Fix the resolution of the ErrorKind description method](https://github.com/brson/error-chain/pull/24) ++ ++Contributors: Brian Anderson ++ ++# 0.4.1 (yanked) ++ ++- [Fix a problem with resolving methods of the standard Error type](https://github.com/brson/error-chain/pull/22) ++ ++Contributors: Brian Anderson ++ ++# 0.4.0 (yanked) ++ ++- [Remove the foreign link description and forward to the foreign error](https://github.com/brson/error-chain/pull/19) ++- [Allow missing sections](https://github.com/brson/error-chain/pull/17) ++ ++Contributors: Brian Anderson, Taylor Cramer ++ ++# 0.3.0 ++ ++- [Forward Display implementation for foreign errors](https://github.com/brson/error-chain/pull/13) ++ ++Contributors: Brian Anderson, Taylor Cramer ++ ++# 0.2.2 ++ ++- [Don't require `types` section in macro invocation](https://github.com/brson/error-chain/pull/8) ++- [Add "quick start" to README](https://github.com/brson/error-chain/pull/9) ++ ++Contributors: Brian Anderson, Jake Shadle, Nate Mara diff --cc vendor/error-chain-0.11.0-rc.2/Cargo.toml index 000000000,000000000..1cfe7b3cd new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/Cargo.toml @@@ -1,0 -1,0 +1,32 @@@ ++# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO ++# ++# When uploading crates to the registry Cargo will automatically ++# "normalize" Cargo.toml files for maximal 
compatibility ++# with all versions of Cargo and also rewrite `path` dependencies ++# to registry (e.g. crates.io) dependencies ++# ++# If you believe there's an error in this file please file an ++# issue against the rust-lang/cargo repository. If you're ++# editing this file be aware that the upstream Cargo.toml ++# will likely look very different (and much more reasonable) ++ ++[package] ++name = "error-chain" ++version = "0.11.0-rc.2" ++authors = ["Brian Anderson ", "Paul Colomiets ", "Colin Kiegel ", "Yamakaky "] ++description = "Yet another error boilerplate library." ++documentation = "https://docs.rs/error-chain" ++readme = "README.md" ++keywords = ["error"] ++categories = ["rust-patterns"] ++license = "MIT/Apache-2.0" ++repository = "https://github.com/brson/error-chain" ++[dependencies.backtrace] ++version = "0.3" ++optional = true ++ ++[features] ++example_generated = [] ++default = ["backtrace", "example_generated"] ++[badges.travis-ci] ++repository = "brson/error-chain" diff --cc vendor/error-chain-0.11.0-rc.2/LICENSE-APACHE index 000000000,000000000..16fe87b06 new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/LICENSE-APACHE @@@ -1,0 -1,0 +1,201 @@@ ++ Apache License ++ Version 2.0, January 2004 ++ http://www.apache.org/licenses/ ++ ++TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION ++ ++1. Definitions. ++ ++ "License" shall mean the terms and conditions for use, reproduction, ++ and distribution as defined by Sections 1 through 9 of this document. ++ ++ "Licensor" shall mean the copyright owner or entity authorized by ++ the copyright owner that is granting the License. ++ ++ "Legal Entity" shall mean the union of the acting entity and all ++ other entities that control, are controlled by, or are under common ++ control with that entity. 
For the purposes of this definition, ++ "control" means (i) the power, direct or indirect, to cause the ++ direction or management of such entity, whether by contract or ++ otherwise, or (ii) ownership of fifty percent (50%) or more of the ++ outstanding shares, or (iii) beneficial ownership of such entity. ++ ++ "You" (or "Your") shall mean an individual or Legal Entity ++ exercising permissions granted by this License. ++ ++ "Source" form shall mean the preferred form for making modifications, ++ including but not limited to software source code, documentation ++ source, and configuration files. ++ ++ "Object" form shall mean any form resulting from mechanical ++ transformation or translation of a Source form, including but ++ not limited to compiled object code, generated documentation, ++ and conversions to other media types. ++ ++ "Work" shall mean the work of authorship, whether in Source or ++ Object form, made available under the License, as indicated by a ++ copyright notice that is included in or attached to the work ++ (an example is provided in the Appendix below). ++ ++ "Derivative Works" shall mean any work, whether in Source or Object ++ form, that is based on (or derived from) the Work and for which the ++ editorial revisions, annotations, elaborations, or other modifications ++ represent, as a whole, an original work of authorship. For the purposes ++ of this License, Derivative Works shall not include works that remain ++ separable from, or merely link (or bind by name) to the interfaces of, ++ the Work and Derivative Works thereof. ++ ++ "Contribution" shall mean any work of authorship, including ++ the original version of the Work and any modifications or additions ++ to that Work or Derivative Works thereof, that is intentionally ++ submitted to Licensor for inclusion in the Work by the copyright owner ++ or by an individual or Legal Entity authorized to submit on behalf of ++ the copyright owner. 
For the purposes of this definition, "submitted" ++ means any form of electronic, verbal, or written communication sent ++ to the Licensor or its representatives, including but not limited to ++ communication on electronic mailing lists, source code control systems, ++ and issue tracking systems that are managed by, or on behalf of, the ++ Licensor for the purpose of discussing and improving the Work, but ++ excluding communication that is conspicuously marked or otherwise ++ designated in writing by the copyright owner as "Not a Contribution." ++ ++ "Contributor" shall mean Licensor and any individual or Legal Entity ++ on behalf of whom a Contribution has been received by Licensor and ++ subsequently incorporated within the Work. ++ ++2. Grant of Copyright License. Subject to the terms and conditions of ++ this License, each Contributor hereby grants to You a perpetual, ++ worldwide, non-exclusive, no-charge, royalty-free, irrevocable ++ copyright license to reproduce, prepare Derivative Works of, ++ publicly display, publicly perform, sublicense, and distribute the ++ Work and such Derivative Works in Source or Object form. ++ ++3. Grant of Patent License. Subject to the terms and conditions of ++ this License, each Contributor hereby grants to You a perpetual, ++ worldwide, non-exclusive, no-charge, royalty-free, irrevocable ++ (except as stated in this section) patent license to make, have made, ++ use, offer to sell, sell, import, and otherwise transfer the Work, ++ where such license applies only to those patent claims licensable ++ by such Contributor that are necessarily infringed by their ++ Contribution(s) alone or by combination of their Contribution(s) ++ with the Work to which such Contribution(s) was submitted. 
If You ++ institute patent litigation against any entity (including a ++ cross-claim or counterclaim in a lawsuit) alleging that the Work ++ or a Contribution incorporated within the Work constitutes direct ++ or contributory patent infringement, then any patent licenses ++ granted to You under this License for that Work shall terminate ++ as of the date such litigation is filed. ++ ++4. Redistribution. You may reproduce and distribute copies of the ++ Work or Derivative Works thereof in any medium, with or without ++ modifications, and in Source or Object form, provided that You ++ meet the following conditions: ++ ++ (a) You must give any other recipients of the Work or ++ Derivative Works a copy of this License; and ++ ++ (b) You must cause any modified files to carry prominent notices ++ stating that You changed the files; and ++ ++ (c) You must retain, in the Source form of any Derivative Works ++ that You distribute, all copyright, patent, trademark, and ++ attribution notices from the Source form of the Work, ++ excluding those notices that do not pertain to any part of ++ the Derivative Works; and ++ ++ (d) If the Work includes a "NOTICE" text file as part of its ++ distribution, then any Derivative Works that You distribute must ++ include a readable copy of the attribution notices contained ++ within such NOTICE file, excluding those notices that do not ++ pertain to any part of the Derivative Works, in at least one ++ of the following places: within a NOTICE text file distributed ++ as part of the Derivative Works; within the Source form or ++ documentation, if provided along with the Derivative Works; or, ++ within a display generated by the Derivative Works, if and ++ wherever such third-party notices normally appear. The contents ++ of the NOTICE file are for informational purposes only and ++ do not modify the License. 
You may add Your own attribution ++ notices within Derivative Works that You distribute, alongside ++ or as an addendum to the NOTICE text from the Work, provided ++ that such additional attribution notices cannot be construed ++ as modifying the License. ++ ++ You may add Your own copyright statement to Your modifications and ++ may provide additional or different license terms and conditions ++ for use, reproduction, or distribution of Your modifications, or ++ for any such Derivative Works as a whole, provided Your use, ++ reproduction, and distribution of the Work otherwise complies with ++ the conditions stated in this License. ++ ++5. Submission of Contributions. Unless You explicitly state otherwise, ++ any Contribution intentionally submitted for inclusion in the Work ++ by You to the Licensor shall be under the terms and conditions of ++ this License, without any additional terms or conditions. ++ Notwithstanding the above, nothing herein shall supersede or modify ++ the terms of any separate license agreement you may have executed ++ with Licensor regarding such Contributions. ++ ++6. Trademarks. This License does not grant permission to use the trade ++ names, trademarks, service marks, or product names of the Licensor, ++ except as required for reasonable and customary use in describing the ++ origin of the Work and reproducing the content of the NOTICE file. ++ ++7. Disclaimer of Warranty. Unless required by applicable law or ++ agreed to in writing, Licensor provides the Work (and each ++ Contributor provides its Contributions) on an "AS IS" BASIS, ++ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or ++ implied, including, without limitation, any warranties or conditions ++ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A ++ PARTICULAR PURPOSE. 
You are solely responsible for determining the ++ appropriateness of using or redistributing the Work and assume any ++ risks associated with Your exercise of permissions under this License. ++ ++8. Limitation of Liability. In no event and under no legal theory, ++ whether in tort (including negligence), contract, or otherwise, ++ unless required by applicable law (such as deliberate and grossly ++ negligent acts) or agreed to in writing, shall any Contributor be ++ liable to You for damages, including any direct, indirect, special, ++ incidental, or consequential damages of any character arising as a ++ result of this License or out of the use or inability to use the ++ Work (including but not limited to damages for loss of goodwill, ++ work stoppage, computer failure or malfunction, or any and all ++ other commercial damages or losses), even if such Contributor ++ has been advised of the possibility of such damages. ++ ++9. Accepting Warranty or Additional Liability. While redistributing ++ the Work or Derivative Works thereof, You may choose to offer, ++ and charge a fee for, acceptance of support, warranty, indemnity, ++ or other liability obligations and/or rights consistent with this ++ License. However, in accepting such obligations, You may act only ++ on Your own behalf and on Your sole responsibility, not on behalf ++ of any other Contributor, and only if You agree to indemnify, ++ defend, and hold each Contributor harmless for any liability ++ incurred by, or claims asserted against, such Contributor by reason ++ of your accepting any such warranty or additional liability. ++ ++END OF TERMS AND CONDITIONS ++ ++APPENDIX: How to apply the Apache License to your work. ++ ++ To apply the Apache License to your work, attach the following ++ boilerplate notice, with the fields enclosed by brackets "[]" ++ replaced with your own identifying information. (Don't include ++ the brackets!) 
The text should be enclosed in the appropriate ++ comment syntax for the file format. We also recommend that a ++ file or class name and description of purpose be included on the ++ same "printed page" as the copyright notice for easier ++ identification within third-party archives. ++ ++Copyright [yyyy] [name of copyright owner] ++ ++Licensed under the Apache License, Version 2.0 (the "License"); ++you may not use this file except in compliance with the License. ++You may obtain a copy of the License at ++ ++ http://www.apache.org/licenses/LICENSE-2.0 ++ ++Unless required by applicable law or agreed to in writing, software ++distributed under the License is distributed on an "AS IS" BASIS, ++WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++See the License for the specific language governing permissions and ++limitations under the License. diff --cc vendor/error-chain-0.11.0-rc.2/LICENSE-MIT index 000000000,000000000..5f28864c8 new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/LICENSE-MIT @@@ -1,0 -1,0 +1,26 @@@ ++Copyright (c) 2017 The Error-Chain Project Developers ++ ++Permission is hereby granted, free of charge, to any ++person obtaining a copy of this software and associated ++documentation files (the "Software"), to deal in the ++Software without restriction, including without ++limitation the rights to use, copy, modify, merge, ++publish, distribute, sublicense, and/or sell copies of ++the Software, and to permit persons to whom the Software ++is furnished to do so, subject to the following ++conditions: ++ ++The above copyright notice and this permission notice ++shall be included in all copies or substantial portions ++of the Software. ++ ++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ++ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED ++TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A ++PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT ++SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY ++CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION ++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR ++IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER ++DEALINGS IN THE SOFTWARE. ++ diff --cc vendor/error-chain-0.11.0-rc.2/README.md index 000000000,000000000..1e9bbf4ca new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/README.md @@@ -1,0 -1,0 +1,36 @@@ ++# error-chain - Consistent error handling for Rust ++ ++[![Build Status](https://api.travis-ci.org/brson/error-chain.svg?branch=master)](https://travis-ci.org/brson/error-chain) ++[![Latest Version](https://img.shields.io/crates/v/error-chain.svg)](https://crates.io/crates/error-chain) ++[![License](https://img.shields.io/github/license/brson/error-chain.svg)](https://github.com/brson/error-chain) ++ ++`error-chain` makes it easy to take full advantage of Rust's error ++handling features without the overhead of maintaining boilerplate ++error types and conversions. It implements an opinionated strategy for ++defining your own error types, as well as conversions from others' ++error types. ++ ++[Documentation (crates.io)](https://docs.rs/error-chain). ++ ++[Documentation (master)](https://brson.github.io/error-chain). ++ ++## Quick start ++ ++If you just want to set up your new project with error-chain, ++follow the [quickstart.rs] template, and read this [intro] ++to error-chain. ++ ++[quickstart.rs]: https://github.com/brson/error-chain/blob/master/examples/quickstart.rs ++[intro]: http://brson.github.io/2016/11/30/starting-with-error-chain ++ ++## Supported Rust version ++ ++Please view the beginning of the [Travis configuration file](.travis.yml) ++to see the oldest supported Rust version. ++ ++Note that `error-chain` supports older versions of Rust when built with ++`default-features = false`. 
++ ++## License ++ ++MIT/Apache-2.0 diff --cc vendor/error-chain-0.11.0-rc.2/examples/all.rs index 000000000,000000000..ccc3ab703 new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/examples/all.rs @@@ -1,0 -1,0 +1,36 @@@ ++#[macro_use] ++extern crate error_chain; ++ ++pub mod inner { ++ error_chain!{} ++} ++ ++#[cfg(feature = "a_feature")] ++pub mod feature { ++ error_chain!{} ++} ++ ++error_chain! { ++ // Types generated by the macro. If empty or absent, it defaults to ++ // Error, ErrorKind, Result; ++ types { ++ // With custom names: ++ MyError, MyErrorKind, MyResult; ++ // Without the `Result` wrapper: ++ // Error, ErrorKind; ++ } ++ ++ // Automatic bindings to other error types generated by `error_chain!`. ++ links { ++ Inner(inner::Error, inner::ErrorKind); ++ // Attributes can be added at the end of the declaration. ++ Feature(feature::Error, feature::ErrorKind) #[cfg(feature = "a_feature")]; ++ } ++ ++ // Bindings to types implementing std::error::Error. ++ foreign_links { ++ Io(::std::io::Error); ++ } ++} ++ ++fn main() {} diff --cc vendor/error-chain-0.11.0-rc.2/examples/chain_err.rs index 000000000,000000000..bd8effdaf new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/examples/chain_err.rs @@@ -1,0 -1,0 +1,69 @@@ ++//! Demonstrates usage of `Error::caused` method. This method enables chaining errors ++//! like `ResultExt::chain_err` but doesn't require the presence of a `Result` wrapper. ++ ++#[macro_use] ++extern crate error_chain; ++ ++use std::fs::File; ++ ++mod errors { ++ use std::io; ++ use super::LaunchStage; ++ ++ error_chain! 
{ ++ foreign_links { ++ Io(io::Error) #[doc = "Error during IO"]; ++ } ++ ++ errors { ++ Launch(phase: LaunchStage) { ++ description("An error occurred during startup") ++ display("Startup aborted: {:?} did not complete successfully", phase) ++ } ++ ++ ConfigLoad(path: String) { ++ description("Config file not found") ++ display("Unable to read file `{}`", path) ++ } ++ } ++ } ++ ++ impl From for ErrorKind { ++ fn from(v: LaunchStage) -> Self { ++ ErrorKind::Launch(v) ++ } ++ } ++} ++ ++pub use errors::*; ++ ++#[derive(Debug, Clone, PartialEq, Eq)] ++pub enum LaunchStage { ++ ConfigLoad, ++ ConfigParse, ++ ConfigResolve, ++} ++ ++/// Read the service config from the file specified. ++fn load_config(rel_path: &str) -> Result<()> { ++ File::open(rel_path) ++ .map(|_| ()) ++ .chain_err(|| ErrorKind::ConfigLoad(rel_path.to_string())) ++} ++ ++/// Launch the service. ++fn launch(rel_path: &str) -> Result<()> { ++ load_config(rel_path).map_err(|e| match e { ++ e @ Error(ErrorKind::ConfigLoad(_), _) => { ++ e.chain_err(|| LaunchStage::ConfigLoad) ++ } ++ e => e.chain_err(|| "Unknown failure"), ++ }) ++} ++ ++fn main() { ++ let chain = launch("does_not_exist.json").unwrap_err(); ++ for err in chain.iter() { ++ println!("{}", err); ++ } ++} diff --cc vendor/error-chain-0.11.0-rc.2/examples/doc.rs index 000000000,000000000..999ac9cef new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/examples/doc.rs @@@ -1,0 -1,0 +1,28 @@@ ++#![deny(missing_docs)] ++ ++//! This module is used to check that all generated items are documented. ++ ++#[macro_use] ++extern crate error_chain; ++ ++/// Inner module. ++pub mod inner { ++ error_chain!{} ++} ++ ++error_chain! 
{ ++ links { ++ Inner(inner::Error, inner::ErrorKind) #[doc = "Doc"]; ++ } ++ foreign_links { ++ Io(::std::io::Error) #[doc = "Io"]; ++ } ++ errors { ++ /// Doc ++ Test2 { ++ ++ } ++ } ++} ++ ++fn main() {} diff --cc vendor/error-chain-0.11.0-rc.2/examples/quickstart.rs index 000000000,000000000..2e3e2b5d3 new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/examples/quickstart.rs @@@ -1,0 -1,0 +1,80 @@@ ++// Simple and robust error handling with error-chain! ++// Use this as a template for new projects. ++ ++// `error_chain!` can recurse deeply ++#![recursion_limit = "1024"] ++ ++// Import the macro. Don't forget to add `error-chain` in your ++// `Cargo.toml`! ++#[macro_use] ++extern crate error_chain; ++ ++// We'll put our errors in an `errors` module, and other modules in ++// this crate will `use errors::*;` to get access to everything ++// `error_chain!` creates. ++mod errors { ++ // Create the Error, ErrorKind, ResultExt, and Result types ++ error_chain!{} ++} ++ ++// This only gives access within this module. Make this `pub use errors::*;` ++// instead if the types must be accessible from other modules (e.g., within ++// a `links` section). ++use errors::*; ++ ++fn main() { ++ if let Err(ref e) = run() { ++ use std::io::Write; ++ let stderr = &mut ::std::io::stderr(); ++ let errmsg = "Error writing to stderr"; ++ ++ writeln!(stderr, "error: {}", e).expect(errmsg); ++ ++ for e in e.iter().skip(1) { ++ writeln!(stderr, "caused by: {}", e).expect(errmsg); ++ } ++ ++ // The backtrace is not always generated. Try to run this example ++ // with `RUST_BACKTRACE=1`. ++ if let Some(backtrace) = e.backtrace() { ++ writeln!(stderr, "backtrace: {:?}", backtrace).expect(errmsg); ++ } ++ ++ ::std::process::exit(1); ++ } ++} ++ ++// The above main gives you maximum control over how the error is ++// formatted. If you don't care (i.e. 
you want to display the full ++// error during an assert) you can just call the `display_chain` method ++// on the error object ++#[allow(dead_code)] ++fn alternative_main() { ++ if let Err(ref e) = run() { ++ use std::io::Write; ++ use error_chain::ChainedError; // trait which holds `display_chain` ++ let stderr = &mut ::std::io::stderr(); ++ let errmsg = "Error writing to stderr"; ++ ++ writeln!(stderr, "{}", e.display_chain()).expect(errmsg); ++ ::std::process::exit(1); ++ } ++} ++ ++// Use this macro to auto-generate the main above. You may want to ++// set the `RUST_BACKTRACE` env variable to see a backtrace. ++// quick_main!(run); ++ ++ ++// Most functions will return the `Result` type, imported from the ++// `errors` module. It is a typedef of the standard `Result` type ++// for which the error type is always our own `Error`. ++fn run() -> Result<()> { ++ use std::fs::File; ++ ++ // This operation will fail ++ File::open("tretrete") ++ .chain_err(|| "unable to open tretrete file")?; ++ ++ Ok(()) ++} diff --cc vendor/error-chain-0.11.0-rc.2/examples/size.rs index 000000000,000000000..ae360d66e new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/examples/size.rs @@@ -1,0 -1,0 +1,38 @@@ ++#[macro_use] ++extern crate error_chain; ++ ++use std::mem::{size_of, size_of_val}; ++ ++error_chain! 
{ ++ errors { ++ AVariant ++ Another ++ } ++} ++ ++fn main() { ++ println!("Memory usage in bytes"); ++ println!("---------------------"); ++ println!("Result<()>: {}", size_of::>()); ++ println!(" (): {}", size_of::<()>()); ++ println!(" Error: {}", size_of::()); ++ println!(" ErrorKind: {}", size_of::()); ++ let msg = ErrorKind::Msg("test".into()); ++ println!(" ErrorKind::Msg: {}", size_of_val(&msg)); ++ println!(" String: {}", size_of::()); ++ println!(" State: {}", size_of::()); ++ #[cfg(feature = "backtrace")] ++ { ++ let state = error_chain::State { ++ next_error: None, ++ backtrace: None, ++ }; ++ println!(" State.next_error: {}", size_of_val(&state.next_error)); ++ println!(" State.backtrace: {}", size_of_val(&state.backtrace)); ++ } ++ #[cfg(not(feature = "backtrace"))] ++ { ++ let state = error_chain::State { next_error: None }; ++ println!(" State.next_error: {}", size_of_val(&state.next_error)); ++ } ++} diff --cc vendor/error-chain-0.11.0-rc.2/src/error_chain.rs index 000000000,000000000..5d1958f99 new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/src/error_chain.rs @@@ -1,0 -1,0 +1,458 @@@ ++/// Prefer to use `error_chain` instead of this macro. ++#[macro_export] ++macro_rules! error_chain_processed { ++ // Default values for `types`. ++ ( ++ types {} ++ $( $rest: tt )* ++ ) => { ++ error_chain_processed! { ++ types { ++ Error, ErrorKind, ResultExt, Result; ++ } ++ $( $rest )* ++ } ++ }; ++ // With `Result` wrapper. ++ ( ++ types { ++ $error_name:ident, $error_kind_name:ident, ++ $result_ext_name:ident, $result_name:ident; ++ } ++ $( $rest: tt )* ++ ) => { ++ error_chain_processed! { ++ types { ++ $error_name, $error_kind_name, ++ $result_ext_name; ++ } ++ $( $rest )* ++ } ++ /// Convenient wrapper around `std::Result`. ++ #[allow(unused)] ++ pub type $result_name = ::std::result::Result; ++ }; ++ // Without `Result` wrapper. 
++ ( ++ types { ++ $error_name:ident, $error_kind_name:ident, ++ $result_ext_name:ident; ++ } ++ ++ links { ++ $( $link_variant:ident ( $link_error_path:path, $link_kind_path:path ) ++ $( #[$meta_links:meta] )*; ) * ++ } ++ ++ foreign_links { ++ $( $foreign_link_variant:ident ( $foreign_link_error_path:path ) ++ $( #[$meta_foreign_links:meta] )*; )* ++ } ++ ++ errors { ++ $( $error_chunks:tt ) * ++ } ++ ++ ) => { ++ /// The Error type. ++ /// ++ /// This tuple struct is made of two elements: ++ /// ++ /// - an `ErrorKind` which is used to determine the type of the error. ++ /// - An internal `State`, not meant for direct use outside of `error_chain` ++ /// internals, containing: ++ /// - a backtrace, generated when the error is created. ++ /// - an error chain, used for the implementation of `Error::cause()`. ++ #[derive(Debug)] ++ pub struct $error_name( ++ // The members must be `pub` for `links`. ++ /// The kind of the error. ++ pub $error_kind_name, ++ /// Contains the error chain and the backtrace. 
++ #[doc(hidden)] ++ pub $crate::State, ++ ); ++ ++ impl $crate::ChainedError for $error_name { ++ type ErrorKind = $error_kind_name; ++ ++ fn new(kind: $error_kind_name, state: $crate::State) -> $error_name { ++ $error_name(kind, state) ++ } ++ ++ fn from_kind(kind: Self::ErrorKind) -> Self { ++ Self::from_kind(kind) ++ } ++ ++ fn with_chain(error: E, kind: K) ++ -> Self ++ where E: ::std::error::Error + Send + 'static, ++ K: Into ++ { ++ Self::with_chain(error, kind) ++ } ++ ++ fn kind(&self) -> &Self::ErrorKind { ++ self.kind() ++ } ++ ++ fn iter(&self) -> $crate::Iter { ++ $crate::Iter::new(Some(self)) ++ } ++ ++ fn chain_err(self, error: F) -> Self ++ where F: FnOnce() -> EK, ++ EK: Into<$error_kind_name> { ++ self.chain_err(error) ++ } ++ ++ fn backtrace(&self) -> Option<&$crate::Backtrace> { ++ self.backtrace() ++ } ++ ++ impl_extract_backtrace!($error_name ++ $error_kind_name ++ $([$link_error_path, $(#[$meta_links])*])*); ++ } ++ ++ #[allow(dead_code)] ++ impl $error_name { ++ /// Constructs an error from a kind, and generates a backtrace. ++ pub fn from_kind(kind: $error_kind_name) -> $error_name { ++ $error_name( ++ kind, ++ $crate::State::default(), ++ ) ++ } ++ ++ /// Constructs a chained error from another error and a kind, and generates a backtrace. ++ pub fn with_chain(error: E, kind: K) ++ -> $error_name ++ where E: ::std::error::Error + Send + 'static, ++ K: Into<$error_kind_name> ++ { ++ $error_name::with_boxed_chain(Box::new(error), kind) ++ } ++ ++ /// Construct a chained error from another boxed error and a kind, and generates a backtrace ++ pub fn with_boxed_chain(error: Box<::std::error::Error + Send>, kind: K) ++ -> $error_name ++ where K: Into<$error_kind_name> ++ { ++ $error_name( ++ kind.into(), ++ $crate::State::new::<$error_name>(error, ), ++ ) ++ } ++ ++ /// Returns the kind of the error. ++ pub fn kind(&self) -> &$error_kind_name { ++ &self.0 ++ } ++ ++ /// Iterates over the error chain. 
++ pub fn iter(&self) -> $crate::Iter { ++ $crate::ChainedError::iter(self) ++ } ++ ++ /// Returns the backtrace associated with this error. ++ pub fn backtrace(&self) -> Option<&$crate::Backtrace> { ++ self.1.backtrace() ++ } ++ ++ /// Extends the error chain with a new entry. ++ pub fn chain_err(self, error: F) -> $error_name ++ where F: FnOnce() -> EK, EK: Into<$error_kind_name> { ++ $error_name::with_chain(self, Self::from_kind(error().into())) ++ } ++ } ++ ++ impl ::std::error::Error for $error_name { ++ fn description(&self) -> &str { ++ self.0.description() ++ } ++ ++ #[allow(unused_doc_comment)] ++ fn cause(&self) -> Option<&::std::error::Error> { ++ match self.1.next_error { ++ Some(ref c) => Some(&**c), ++ None => { ++ match self.0 { ++ $( ++ $(#[$meta_foreign_links])* ++ $error_kind_name::$foreign_link_variant(ref foreign_err) => { ++ foreign_err.cause() ++ } ++ ) * ++ _ => None ++ } ++ } ++ } ++ } ++ } ++ ++ impl ::std::fmt::Display for $error_name { ++ fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { ++ ::std::fmt::Display::fmt(&self.0, f) ++ } ++ } ++ ++ $( ++ $(#[$meta_links])* ++ impl From<$link_error_path> for $error_name { ++ fn from(e: $link_error_path) -> Self { ++ $error_name( ++ $error_kind_name::$link_variant(e.0), ++ e.1, ++ ) ++ } ++ } ++ ) * ++ ++ $( ++ $(#[$meta_foreign_links])* ++ impl From<$foreign_link_error_path> for $error_name { ++ fn from(e: $foreign_link_error_path) -> Self { ++ $error_name::from_kind( ++ $error_kind_name::$foreign_link_variant(e) ++ ) ++ } ++ } ++ ) * ++ ++ impl From<$error_kind_name> for $error_name { ++ fn from(e: $error_kind_name) -> Self { ++ $error_name::from_kind(e) ++ } ++ } ++ ++ impl<'a> From<&'a str> for $error_name { ++ fn from(s: &'a str) -> Self { ++ $error_name::from_kind(s.into()) ++ } ++ } ++ ++ impl From for $error_name { ++ fn from(s: String) -> Self { ++ $error_name::from_kind(s.into()) ++ } ++ } ++ ++ impl ::std::ops::Deref for $error_name { ++ type Target = 
$error_kind_name; ++ ++ fn deref(&self) -> &Self::Target { ++ &self.0 ++ } ++ } ++ ++ ++ // The ErrorKind type ++ // -------------- ++ ++ quick_error! { ++ /// The kind of an error. ++ #[derive(Debug)] ++ pub enum $error_kind_name { ++ ++ /// A convenient variant for String. ++ Msg(s: String) { ++ description(&s) ++ display("{}", s) ++ } ++ ++ $( ++ $(#[$meta_links])* ++ $link_variant(e: $link_kind_path) { ++ description(e.description()) ++ display("{}", e) ++ } ++ ) * ++ ++ $( ++ $(#[$meta_foreign_links])* ++ $foreign_link_variant(err: $foreign_link_error_path) { ++ description(::std::error::Error::description(err)) ++ display("{}", err) ++ } ++ ) * ++ ++ $($error_chunks)* ++ } ++ } ++ ++ $( ++ $(#[$meta_links])* ++ impl From<$link_kind_path> for $error_kind_name { ++ fn from(e: $link_kind_path) -> Self { ++ $error_kind_name::$link_variant(e) ++ } ++ } ++ ) * ++ ++ impl<'a> From<&'a str> for $error_kind_name { ++ fn from(s: &'a str) -> Self { ++ $error_kind_name::Msg(s.to_string()) ++ } ++ } ++ ++ impl From for $error_kind_name { ++ fn from(s: String) -> Self { ++ $error_kind_name::Msg(s) ++ } ++ } ++ ++ impl From<$error_name> for $error_kind_name { ++ fn from(e: $error_name) -> Self { ++ e.0 ++ } ++ } ++ ++ // The ResultExt trait defines the `chain_err` method. ++ ++ /// Additional methods for `Result`, for easy interaction with this crate. ++ pub trait $result_ext_name { ++ /// If the `Result` is an `Err` then `chain_err` evaluates the closure, ++ /// which returns *some type that can be converted to `ErrorKind`*, boxes ++ /// the original error to store as the cause, then returns a new error ++ /// containing the original error. 
++ fn chain_err(self, callback: F) -> ::std::result::Result ++ where F: FnOnce() -> EK, ++ EK: Into<$error_kind_name>; ++ } ++ ++ impl $result_ext_name for ::std::result::Result where E: ::std::error::Error + Send + 'static { ++ fn chain_err(self, callback: F) -> ::std::result::Result ++ where F: FnOnce() -> EK, ++ EK: Into<$error_kind_name> { ++ self.map_err(move |e| { ++ let state = $crate::State::new::<$error_name>(Box::new(e), ); ++ $crate::ChainedError::new(callback().into(), state) ++ }) ++ } ++ } ++ ++ impl $result_ext_name for ::std::option::Option { ++ fn chain_err(self, callback: F) -> ::std::result::Result ++ where F: FnOnce() -> EK, ++ EK: Into<$error_kind_name> { ++ self.ok_or_else(move || { ++ $crate::ChainedError::from_kind(callback().into()) ++ }) ++ } ++ } ++ ++ ++ }; ++} ++ ++/// Internal macro used for reordering of the fields. ++#[doc(hidden)] ++#[macro_export] ++macro_rules! error_chain_processing { ++ ( ++ ({}, $b:tt, $c:tt, $d:tt) ++ types $content:tt ++ $( $tail:tt )* ++ ) => { ++ error_chain_processing! { ++ ($content, $b, $c, $d) ++ $($tail)* ++ } ++ }; ++ ( ++ ($a:tt, {}, $c:tt, $d:tt) ++ links $content:tt ++ $( $tail:tt )* ++ ) => { ++ error_chain_processing! { ++ ($a, $content, $c, $d) ++ $($tail)* ++ } ++ }; ++ ( ++ ($a:tt, $b:tt, {}, $d:tt) ++ foreign_links $content:tt ++ $( $tail:tt )* ++ ) => { ++ error_chain_processing! { ++ ($a, $b, $content, $d) ++ $($tail)* ++ } ++ }; ++ ( ++ ($a:tt, $b:tt, $c:tt, {}) ++ errors $content:tt ++ $( $tail:tt )* ++ ) => { ++ error_chain_processing! { ++ ($a, $b, $c, $content) ++ $($tail)* ++ } ++ }; ++ ( ($a:tt, $b:tt, $c:tt, $d:tt) ) => { ++ error_chain_processed! { ++ types $a ++ links $b ++ foreign_links $c ++ errors $d ++ } ++ }; ++} ++ ++/// This macro is used for handling of duplicated and out-of-order fields. For ++/// the exact rules, see `error_chain_processed`. ++#[macro_export] ++macro_rules! 
error_chain { ++ ( $( $block_name:ident { $( $block_content:tt )* } )* ) => { ++ error_chain_processing! { ++ ({}, {}, {}, {}) ++ $($block_name { $( $block_content )* })* ++ } ++ }; ++} ++ ++/// Macro used to manage the `backtrace` feature. ++/// ++/// See ++/// https://www.reddit.com/r/rust/comments/57virt/hey_rustaceans_got_an_easy_question_ask_here/da5r4ti/?context=3 ++/// for more details. ++#[macro_export] ++#[doc(hidden)] ++#[cfg(feature = "backtrace")] ++macro_rules! impl_extract_backtrace { ++ ($error_name: ident ++ $error_kind_name: ident ++ $([$link_error_path: path, $(#[$meta_links: meta])*])*) => { ++ #[allow(unused_doc_comment)] ++ fn extract_backtrace(e: &(::std::error::Error + Send + 'static)) ++ -> Option<::std::sync::Arc<$crate::Backtrace>> { ++ if let Some(e) = e.downcast_ref::<$error_name>() { ++ return e.1.backtrace.clone(); ++ } ++ $( ++ $( #[$meta_links] )* ++ { ++ if let Some(e) = e.downcast_ref::<$link_error_path>() { ++ return e.1.backtrace.clone(); ++ } ++ } ++ ) * ++ None ++ } ++ } ++} ++ ++/// Macro used to manage the `backtrace` feature. ++/// ++/// See ++/// https://www.reddit.com/r/rust/comments/57virt/hey_rustaceans_got_an_easy_question_ask_here/da5r4ti/?context=3 ++/// for more details. ++#[macro_export] ++#[doc(hidden)] ++#[cfg(not(feature = "backtrace"))] ++macro_rules! impl_extract_backtrace { ++ ($error_name: ident ++ $error_kind_name: ident ++ $([$link_error_path: path, $(#[$meta_links: meta])*])*) => {} ++} diff --cc vendor/error-chain-0.11.0-rc.2/src/example_generated.rs index 000000000,000000000..413407cae new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/src/example_generated.rs @@@ -1,0 -1,0 +1,38 @@@ ++//! These modules show an example of code generated by the macro. **IT MUST NOT BE ++//! USED OUTSIDE THIS CRATE**. ++//! ++//! This is the basic error structure. You can see that `ErrorKind` ++//! has been populated in a variety of ways. All `ErrorKind`s get a ++//! `Msg` variant for basic errors. 
When strings are converted to ++//! `ErrorKind`s they become `ErrorKind::Msg`. The "links" defined in ++//! the macro are expanded to the `Inner` variant, and the ++//! "foreign links" to the `Io` variant. ++//! ++//! Both types come with a variety of `From` conversions as well: ++//! `Error` can be created from `ErrorKind`, `&str` and `String`, ++//! and the `links` and `foreign_links` error types. `ErrorKind` ++//! can be created from the corresponding `ErrorKind`s of the link ++//! types, as well as from `&str` and `String`. ++//! ++//! `into()` and `From::from` are used heavily to massage types into ++//! the right shape. Which one to use in any specific case depends on ++//! the influence of type inference, but there are some patterns that ++//! arise frequently. ++ ++/// Another code generated by the macro. ++pub mod inner { ++ error_chain!{} ++} ++ ++error_chain! { ++ links { ++ Inner(inner::Error, inner::ErrorKind) #[doc = "Link to another `ErrorChain`."]; ++ } ++ foreign_links { ++ Io(::std::io::Error) #[doc = "Link to a `std::error::Error` type."]; ++ } ++ errors { ++ #[doc = "A custom error kind."] ++ Custom ++ } ++} diff --cc vendor/error-chain-0.11.0-rc.2/src/lib.rs index 000000000,000000000..01a2705c0 new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/src/lib.rs @@@ -1,0 -1,0 +1,777 @@@ ++#![deny(missing_docs)] ++#![allow(unknown_lints)] // to be removed when unused_doc_comments lints is merged ++#![doc(html_root_url = "https://docs.rs/error-chain/0.11.0-rc.2")] ++ ++//! A library for consistent and reliable error handling ++//! ++//! error-chain makes it easy to take full advantage of Rust's ++//! powerful error handling features without the overhead of ++//! maintaining boilerplate error types and conversions. It implements ++//! an opinionated strategy for defining your own error types, as well ++//! as conversions from others' error types. ++//! ++//! ## Quick start ++//! ++//! 
If you just want to set up your new project with error-chain, ++//! follow the [quickstart.rs] template, and read this [intro] ++//! to error-chain. ++//! ++//! [quickstart.rs]: https://github.com/brson/error-chain/blob/master/examples/quickstart.rs ++//! [intro]: http://brson.github.io/2016/11/30/starting-with-error-chain ++//! ++//! ## Why error chain? ++//! ++//! * error-chain is easy to configure. Handle errors robustly with minimal ++//! effort. ++//! * Basic error handling requires no maintenance of custom error types ++//! nor the `From` conversions that make `?` work. ++//! * error-chain scales from simple error handling strategies to more ++//! rigorous. Return formatted strings for simple errors, only ++//! introducing error variants and their strong typing as needed for ++//! advanced error recovery. ++//! * error-chain makes it trivial to correctly manage the [cause] of ++//! the errors generated by your own code. This is the "chaining" ++//! in "error-chain". ++//! ++//! [cause]: https://doc.rust-lang.org/std/error/trait.Error.html#method.cause ++//! ++//! ## Principles of error-chain ++//! ++//! error-chain is based on the following principles: ++//! ++//! * No error should ever be discarded. This library primarily ++//! makes it easy to "chain" errors with the `chain_err` method. ++//! * Introducing new errors is trivial. Simple errors can be introduced ++//! at the error site with just a string. ++//! * Handling errors is possible with pattern matching. ++//! * Conversions between error types are done in an automatic and ++//! consistent way - `From` conversion behavior is never specified ++//! explicitly. ++//! * Errors implement Send. ++//! * Errors can carry backtraces. ++//! ++//! Similar to other libraries like [error-type] and [quick-error], ++//! this library introduces the error chaining mechanism originally ++//! employed by Cargo. The `error_chain!` macro declares the types ++//! 
and implementation boilerplate necessary for fulfilling a ++//! particular error-handling strategy. Most importantly it defines a ++//! custom error type (called `Error` by convention) and the `From` ++//! conversions that let the `?` operator work. ++//! ++//! This library differs in a few ways from previous error libs: ++//! ++//! * Instead of defining the custom `Error` type as an enum, it is a ++//! struct containing an `ErrorKind` (which defines the ++//! `description` and `display_chain` methods for the error), an opaque, ++//! optional, boxed `std::error::Error + Send + 'static` object ++//! (which defines the `cause`, and establishes the links in the ++//! error chain), and a `Backtrace`. ++//! * The macro also defines a `ResultExt` trait that defines a ++//! `chain_err` method. This method on all `std::error::Error + Send + 'static` ++//! types extends the error chain by boxing the current ++//! error into an opaque object and putting it inside a new concrete ++//! error. ++//! * It provides automatic `From` conversions between other error types ++//! defined by the `error_chain!` that preserve type information, ++//! and facilitate seamless error composition and matching of composed ++//! errors. ++//! * It provides automatic `From` conversions between any other error ++//! type that hides the type of the other error in the `cause` box. ++//! * If `RUST_BACKTRACE` is enabled, it collects a single backtrace at ++//! the earliest opportunity and propagates it down the stack through ++//! `From` and `ResultExt` conversions. ++//! ++//! To accomplish its goals it makes some tradeoffs: ++//! ++//! * The split between the `Error` and `ErrorKind` types can make it ++//! slightly more cumbersome to instantiate new (unchained) errors, ++//! requiring an `Into` or `From` conversion; as well as slightly ++//! more cumbersome to match on errors with another layer of types ++//! to match. ++//! 
* Because the error type contains `std::error::Error + Send + 'static` objects, ++//! it can't implement `PartialEq` for easy comparisons. ++//! ++//! ## Declaring error types ++//! ++//! Generally, you define one family of error types per crate, though ++//! it's also perfectly fine to define error types on a finer-grained ++//! basis, such as per module. ++//! ++//! Assuming you are using crate-level error types, typically you will ++//! define an `errors` module and inside it call `error_chain!`: ++//! ++//! ``` ++//! # #[macro_use] extern crate error_chain; ++//! mod other_error { ++//! error_chain! {} ++//! } ++//! ++//! error_chain! { ++//! // The type defined for this error. These are the conventional ++//! // and recommended names, but they can be arbitrarily chosen. ++//! // ++//! // It is also possible to leave this section out entirely, or ++//! // leave it empty, and these names will be used automatically. ++//! types { ++//! Error, ErrorKind, ResultExt, Result; ++//! } ++//! ++//! // Without the `Result` wrapper: ++//! // ++//! // types { ++//! // Error, ErrorKind, ResultExt; ++//! // } ++//! ++//! // Automatic conversions between this error chain and other ++//! // error chains. In this case, it will e.g. generate an ++//! // `ErrorKind` variant called `Another` which in turn contains ++//! // the `other_error::ErrorKind`, with conversions from ++//! // `other_error::Error`. ++//! // ++//! // Optionally, some attributes can be added to a variant. ++//! // ++//! // This section can be empty. ++//! links { ++//! Another(other_error::Error, other_error::ErrorKind) #[cfg(unix)]; ++//! } ++//! ++//! // Automatic conversions between this error chain and other ++//! // error types not defined by the `error_chain!`. These will be ++//! // wrapped in a new error with, in the first case, the ++//! // `ErrorKind::Fmt` variant. The description and cause will ++//! // forward to the description and cause of the original error. ++//! // ++//! 
// Optionally, some attributes can be added to a variant. ++//! // ++//! // This section can be empty. ++//! foreign_links { ++//! Fmt(::std::fmt::Error); ++//! Io(::std::io::Error) #[cfg(unix)]; ++//! } ++//! ++//! // Define additional `ErrorKind` variants. The syntax here is ++//! // the same as `quick_error!`, but the `from()` and `cause()` ++//! // syntax is not supported. ++//! errors { ++//! InvalidToolchainName(t: String) { ++//! description("invalid toolchain name") ++//! display("invalid toolchain name: '{}'", t) ++//! } ++//! ++//! // You can also add commas after description/display. ++//! // This may work better with some editor auto-indentation modes: ++//! UnknownToolchainVersion(v: String) { ++//! description("unknown toolchain version"), // note the , ++//! display("unknown toolchain version: '{}'", v), // trailing comma is allowed ++//! } ++//! } ++//! } ++//! ++//! # fn main() {} ++//! ``` ++//! ++//! Each section, `types`, `links`, `foreign_links`, and `errors` may ++//! be omitted if it is empty. ++//! ++//! This populates the module with a number of definitions, ++//! the most important of which are the `Error` type ++//! and the `ErrorKind` type. An example of generated code can be found in the ++//! [example_generated](example_generated/index.html) module. ++//! ++//! ## Returning new errors ++//! ++//! Introducing new error chains, with a string message: ++//! ++//! ``` ++//! # #[macro_use] extern crate error_chain; ++//! # fn main() {} ++//! # error_chain! {} ++//! fn foo() -> Result<()> { ++//! Err("foo error!".into()) ++//! } ++//! ``` ++//! ++//! Introducing new error chains, with an `ErrorKind`: ++//! ++//! ``` ++//! # #[macro_use] extern crate error_chain; ++//! # fn main() {} ++//! error_chain! { ++//! errors { FooError } ++//! } ++//! ++//! fn foo() -> Result<()> { ++//! Err(ErrorKind::FooError.into()) ++//! } ++//! ``` ++//! ++//! Note that the return type is the typedef `Result`, which is ++//! 
defined by the macro as `pub type Result = ++//! ::std::result::Result`. Note that in both cases ++//! `.into()` is called to convert a type into the `Error` type; both ++//! strings and `ErrorKind` have `From` conversions to turn them into ++//! `Error`. ++//! ++//! When the error is emitted behind the `?` operator, the explicit conversion ++//! isn't needed; `Err(ErrorKind)` will automatically be converted to `Err(Error)`. ++//! So the below is equivalent to the previous: ++//! ++//! ``` ++//! # #[macro_use] extern crate error_chain; ++//! # fn main() {} ++//! # error_chain! { errors { FooError } } ++//! fn foo() -> Result<()> { ++//! Ok(Err(ErrorKind::FooError)?) ++//! } ++//! ++//! fn bar() -> Result<()> { ++//! Ok(Err("bogus!")?) ++//! } ++//! ``` ++//! ++//! ## The `bail!` macro ++//! ++//! The above method of introducing new errors works but is a little ++//! verbose. Instead, we can use the `bail!` macro, which performs an early return ++//! with conversions done automatically. ++//! ++//! With `bail!` the previous examples look like: ++//! ++//! ``` ++//! # #[macro_use] extern crate error_chain; ++//! # fn main() {} ++//! # error_chain! { errors { FooError } } ++//! fn foo() -> Result<()> { ++//! if true { ++//! bail!(ErrorKind::FooError); ++//! } else { ++//! Ok(()) ++//! } ++//! } ++//! ++//! fn bar() -> Result<()> { ++//! if true { ++//! bail!("bogus!"); ++//! } else { ++//! Ok(()) ++//! } ++//! } ++//! ``` ++//! ++//! ## Chaining errors ++//! error-chain supports extending an error chain by appending new errors. ++//! This can be done on a Result or on an existing Error. ++//! ++//! To extend the error chain: ++//! ++//! ``` ++//! # #[macro_use] extern crate error_chain; ++//! # fn main() {} ++//! # error_chain! {} ++//! # fn do_something() -> Result<()> { unimplemented!() } ++//! # fn test() -> Result<()> { ++//! let res: Result<()> = do_something().chain_err(|| "something went wrong"); ++//! # Ok(()) ++//! # } ++//! ``` ++//! ++//! 
`chain_err` can be called on any `Result` type where the contained ++//! error type implements `std::error::Error + Send + 'static`, as long as ++//! the `Result` type's corresponding `ResultExt` trait is in scope. If ++//! the `Result` is an `Err` then `chain_err` evaluates the closure, ++//! which returns *some type that can be converted to `ErrorKind`*, ++//! boxes the original error to store as the cause, then returns a new ++//! error containing the original error. ++//! ++//! Calling `chain_err` on an existing `Error` instance has the same ++//! signature and produces the same outcome as being called on a `Result` ++//! matching the properties described above. This is most useful when ++//! partially handling errors using the `map_err` function. ++//! ++//! To chain an error directly, use `with_chain`: ++//! ++//! ``` ++//! # #[macro_use] extern crate error_chain; ++//! # fn main() {} ++//! # error_chain! {} ++//! # fn do_something() -> Result<()> { unimplemented!() } ++//! # fn test() -> Result<()> { ++//! let res: Result<()> = ++//! do_something().map_err(|e| Error::with_chain(e, "something went wrong")); ++//! # Ok(()) ++//! # } ++//! ``` ++//! ++//! ## Linking errors ++//! ++//! To convert an error from another error chain to this error chain: ++//! ++//! ``` ++//! # #[macro_use] extern crate error_chain; ++//! # fn main() {} ++//! # mod other { error_chain! {} } ++//! error_chain! { ++//! links { ++//! OtherError(other::Error, other::ErrorKind); ++//! } ++//! } ++//! ++//! fn do_other_thing() -> other::Result<()> { unimplemented!() } ++//! ++//! # fn test() -> Result<()> { ++//! let res: Result<()> = do_other_thing().map_err(|e| e.into()); ++//! # Ok(()) ++//! # } ++//! ``` ++//! ++//! The `Error` and `ErrorKind` types implements `From` for the corresponding ++//! types of all linked error chains. Linked errors do not introduce a new ++//! cause to the error chain. ++//! ++//! ## Matching errors ++//! ++//! 
error-chain error variants are matched with simple patterns. ++//! `Error` is a tuple struct and its first field is the `ErrorKind`, ++//! making dispatching on error kinds relatively compact: ++//! ++//! ``` ++//! # #[macro_use] extern crate error_chain; ++//! # fn main() { ++//! error_chain! { ++//! errors { ++//! InvalidToolchainName(t: String) { ++//! description("invalid toolchain name") ++//! display("invalid toolchain name: '{}'", t) ++//! } ++//! } ++//! } ++//! ++//! match Error::from("error!") { ++//! Error(ErrorKind::InvalidToolchainName(_), _) => { } ++//! Error(ErrorKind::Msg(_), _) => { } ++//! } ++//! # } ++//! ``` ++//! ++//! Chained errors are also matched with (relatively) compact syntax ++//! ++//! ``` ++//! # #[macro_use] extern crate error_chain; ++//! mod utils { ++//! error_chain! { ++//! errors { ++//! BadStuff { ++//! description("bad stuff") ++//! } ++//! } ++//! } ++//! } ++//! ++//! mod app { ++//! error_chain! { ++//! links { ++//! Utils(::utils::Error, ::utils::ErrorKind); ++//! } ++//! } ++//! } ++//! ++//! ++//! # fn main() { ++//! match app::Error::from("error!") { ++//! app::Error(app::ErrorKind::Utils(utils::ErrorKind::BadStuff), _) => { } ++//! _ => { } ++//! } ++//! # } ++//! ``` ++//! ++//! ## Inspecting errors ++//! ++//! An error-chain error contains information about the error itself, a backtrace, and the chain ++//! of causing errors. For reporting purposes, this information can be accessed as follows. ++//! ++//! ``` ++//! # #[macro_use] extern crate error_chain; ++//! use error_chain::ChainedError; // for e.display_chain() ++//! ++//! error_chain! { ++//! errors { ++//! InvalidToolchainName(t: String) { ++//! description("invalid toolchain name") ++//! display("invalid toolchain name: '{}'", t) ++//! } ++//! } ++//! } ++//! ++//! # fn main() { ++//! // Generate an example error to inspect: ++//! let e = "xyzzy".parse::() ++//! .chain_err(|| ErrorKind::InvalidToolchainName("xyzzy".to_string())) ++//! .unwrap_err(); ++//! 
++//! // Get the brief description of the error: ++//! assert_eq!(e.description(), "invalid toolchain name"); ++//! ++//! // Get the display version of the error: ++//! assert_eq!(e.to_string(), "invalid toolchain name: 'xyzzy'"); ++//! ++//! // Get the full cause and backtrace: ++//! println!("{}", e.display_chain().to_string()); ++//! // Error: invalid toolchain name: 'xyzzy' ++//! // Caused by: invalid digit found in string ++//! // stack backtrace: ++//! // 0: 0x7fa9f684fc94 - backtrace::backtrace::libunwind::trace ++//! // at src/backtrace/libunwind.rs:53 ++//! // - backtrace::backtrace::trace ++//! // at src/backtrace/mod.rs:42 ++//! // 1: 0x7fa9f6850b0e - backtrace::capture::{{impl}}::new ++//! // at out/capture.rs:79 ++//! // [..] ++//! # } ++//! ``` ++//! ++//! The `Error` and `ErrorKind` types also allow programmatic access to these elements. ++//! ++//! ## Foreign links ++//! ++//! Errors that do not conform to the same conventions as this library ++//! can still be included in the error chain. They are considered "foreign ++//! errors", and are declared using the `foreign_links` block of the ++//! `error_chain!` macro. `Error`s are automatically created from ++//! foreign errors by the `?` operator. ++//! ++//! Foreign links and regular links have one crucial difference: ++//! `From` conversions for regular links *do not introduce a new error ++//! into the error chain*, while conversions for foreign links *always ++//! introduce a new error into the error chain*. So for the example ++//! above all errors deriving from the `std::fmt::Error` type will be ++//! presented to the user as a new `ErrorKind::Fmt` variant, and the ++//! cause will be the original `std::fmt::Error` error. In contrast, when ++//! `other_error::Error` is converted to `Error` the two `ErrorKind`s ++//! are converted between each other to create a new `Error` but the ++//! old error is discarded; there is no "cause" created from the ++//! original error. ++//! ++//! 
## Backtraces ++//! ++//! If the `RUST_BACKTRACE` environment variable is set to anything ++//! but ``0``, the earliest non-foreign error to be generated creates ++//! a single backtrace, which is passed through all `From` conversions ++//! and `chain_err` invocations of compatible types. To read the ++//! backtrace just call the `backtrace()` method. ++//! ++//! Backtrace generation can be disabled by turning off the `backtrace` feature. ++//! ++//! ## Iteration ++//! ++//! The `iter` method returns an iterator over the chain of error boxes. ++//! ++//! [error-type]: https://github.com/DanielKeep/rust-error-type ++//! [quick-error]: https://github.com/tailhook/quick-error ++ ++ ++#[cfg(feature = "backtrace")] ++extern crate backtrace; ++ ++use std::error; ++use std::iter::Iterator; ++#[cfg(feature = "backtrace")] ++use std::sync::Arc; ++use std::fmt; ++ ++#[cfg(feature = "backtrace")] ++pub use backtrace::Backtrace; ++#[cfg(not(feature = "backtrace"))] ++/// Dummy type used when the `backtrace` feature is disabled. ++pub type Backtrace = (); ++ ++#[macro_use] ++mod quick_error; ++#[macro_use] ++mod error_chain; ++#[macro_use] ++mod quick_main; ++pub use quick_main::ExitCode; ++#[cfg(feature = "example_generated")] ++pub mod example_generated; ++ ++#[derive(Debug)] ++/// Iterator over the error chain using the `Error::cause()` method. ++pub struct Iter<'a>(Option<&'a error::Error>); ++ ++impl<'a> Iter<'a> { ++ /// Returns a new iterator over the error chain using `Error::cause()`. ++ pub fn new(err: Option<&'a error::Error>) -> Iter<'a> { ++ Iter(err) ++ } ++} ++ ++impl<'a> Iterator for Iter<'a> { ++ type Item = &'a error::Error; ++ ++ fn next<'b>(&'b mut self) -> Option<&'a error::Error> { ++ match self.0.take() { ++ Some(e) => { ++ self.0 = e.cause(); ++ Some(e) ++ } ++ None => None, ++ } ++ } ++} ++ ++/// Returns a backtrace of the current call stack if `RUST_BACKTRACE` ++/// is set to anything but ``0``, and `None` otherwise. 
This is used ++/// in the generated error implementations. ++#[cfg(feature = "backtrace")] ++#[doc(hidden)] ++pub fn make_backtrace() -> Option> { ++ match std::env::var_os("RUST_BACKTRACE") { ++ Some(ref val) if val != "0" => Some(Arc::new(Backtrace::new())), ++ _ => None, ++ } ++} ++ ++/// This trait is implemented on all the errors generated by the `error_chain` ++/// macro. ++pub trait ChainedError: error::Error + Send + 'static { ++ /// Associated kind type. ++ type ErrorKind; ++ ++ /// Constructs an error from a kind, and generates a backtrace. ++ fn from_kind(kind: Self::ErrorKind) -> Self where Self: Sized; ++ ++ /// Constructs a chained error from another error and a kind, and generates a backtrace. ++ fn with_chain(error: E, kind: K) -> Self ++ where Self: Sized, ++ E: ::std::error::Error + Send + 'static, ++ K: Into; ++ ++ /// Returns the kind of the error. ++ fn kind(&self) -> &Self::ErrorKind; ++ ++ /// Iterates over the error chain. ++ fn iter(&self) -> Iter; ++ ++ /// Returns the backtrace associated with this error. ++ fn backtrace(&self) -> Option<&Backtrace>; ++ ++ /// Returns an object which implements `Display` for printing the full ++ /// context of this error. ++ /// ++ /// The full cause chain and backtrace, if present, will be printed. ++ fn display_chain<'a>(&'a self) -> DisplayChain<'a, Self> { ++ DisplayChain(self) ++ } ++ ++ /// Extends the error chain with a new entry. ++ fn chain_err(self, error: F) -> Self ++ where F: FnOnce() -> EK, ++ EK: Into; ++ ++ /// Creates an error from its parts. ++ #[doc(hidden)] ++ fn new(kind: Self::ErrorKind, state: State) -> Self where Self: Sized; ++ ++ /// Returns the first known backtrace, either from its State or from one ++ /// of the errors from `foreign_links`. ++ #[cfg(feature = "backtrace")] ++ #[doc(hidden)] ++ fn extract_backtrace(e: &(error::Error + Send + 'static)) -> Option> ++ where Self: Sized; ++} ++ ++/// A struct which formats an error for output. 
++#[derive(Debug)] ++pub struct DisplayChain<'a, T: 'a + ?Sized>(&'a T); ++ ++impl<'a, T> fmt::Display for DisplayChain<'a, T> ++ where T: ChainedError ++{ ++ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { ++ // Keep `try!` for 1.10 support ++ try!(writeln!(fmt, "Error: {}", self.0)); ++ ++ for e in self.0.iter().skip(1) { ++ try!(writeln!(fmt, "Caused by: {}", e)); ++ } ++ ++ if let Some(backtrace) = self.0.backtrace() { ++ try!(writeln!(fmt, "{:?}", backtrace)); ++ } ++ ++ Ok(()) ++ } ++} ++ ++/// Common state between errors. ++#[derive(Debug)] ++#[doc(hidden)] ++pub struct State { ++ /// Next error in the error chain. ++ pub next_error: Option>, ++ /// Backtrace for the current error. ++ #[cfg(feature = "backtrace")] ++ pub backtrace: Option>, ++} ++ ++impl Default for State { ++ #[cfg(feature = "backtrace")] ++ fn default() -> State { ++ State { ++ next_error: None, ++ backtrace: make_backtrace(), ++ } ++ } ++ ++ #[cfg(not(feature = "backtrace"))] ++ fn default() -> State { ++ State { next_error: None } ++ } ++} ++ ++impl State { ++ /// Creates a new State type ++ #[cfg(feature = "backtrace")] ++ pub fn new(e: Box) -> State { ++ let backtrace = CE::extract_backtrace(&*e).or_else(make_backtrace); ++ State { ++ next_error: Some(e), ++ backtrace: backtrace, ++ } ++ } ++ ++ /// Creates a new State type ++ #[cfg(not(feature = "backtrace"))] ++ pub fn new(e: Box) -> State { ++ State { next_error: Some(e) } ++ } ++ ++ /// Returns the inner backtrace if present. ++ #[cfg(feature = "backtrace")] ++ pub fn backtrace(&self) -> Option<&Backtrace> { ++ self.backtrace.as_ref().map(|v| &**v) ++ } ++ ++ /// Returns the inner backtrace if present. ++ #[cfg(not(feature = "backtrace"))] ++ pub fn backtrace(&self) -> Option<&Backtrace> { ++ None ++ } ++} ++ ++/// Exits a function early with an error ++/// ++/// The `bail!` macro provides an easy way to exit a function. ++/// `bail!(expr)` is equivalent to writing. 
++/// ++/// ``` ++/// # #[macro_use] extern crate error_chain; ++/// # error_chain! { } ++/// # fn main() { } ++/// # fn foo() -> Result<()> { ++/// # let expr = ""; ++/// return Err(expr.into()); ++/// # } ++/// ``` ++/// ++/// And as shorthand it takes a formatting string a la `println!`: ++/// ++/// ``` ++/// # #[macro_use] extern crate error_chain; ++/// # error_chain! { } ++/// # fn main() { } ++/// # fn foo() -> Result<()> { ++/// # let n = 0; ++/// bail!("bad number: {}", n); ++/// # } ++/// ``` ++/// ++/// # Examples ++/// ++/// Bailing on a custom error: ++/// ++/// ``` ++/// # #[macro_use] extern crate error_chain; ++/// # fn main() {} ++/// error_chain! { ++/// errors { FooError } ++/// } ++/// ++/// fn foo() -> Result<()> { ++/// if bad_condition() { ++/// bail!(ErrorKind::FooError); ++/// } ++/// ++/// Ok(()) ++/// } ++/// ++/// # fn bad_condition() -> bool { true } ++/// ``` ++/// ++/// Bailing on a formatted string: ++/// ++/// ``` ++/// # #[macro_use] extern crate error_chain; ++/// # fn main() {} ++/// error_chain! { } ++/// ++/// fn foo() -> Result<()> { ++/// if let Some(bad_num) = bad_condition() { ++/// bail!("so bad: {}", bad_num); ++/// } ++/// ++/// Ok(()) ++/// } ++/// ++/// # fn bad_condition() -> Option { None } ++/// ``` ++#[macro_export] ++macro_rules! bail { ++ ($e:expr) => { ++ return Err($e.into()); ++ }; ++ ($fmt:expr, $($arg:tt)+) => { ++ return Err(format!($fmt, $($arg)+).into()); ++ }; ++} ++ ++/// Exits a function early with an error if the condition is not satisfied ++/// ++/// The `ensure!` macro is a convenience helper that provides a way to exit ++/// a function with an error if the given condition fails. ++/// ++/// As an example, `ensure!(condition, "error code: {}", errcode)` is equivalent to ++/// ++/// ``` ++/// # #[macro_use] extern crate error_chain; ++/// # error_chain! 
{ } ++/// # fn main() { } ++/// # fn foo() -> Result<()> { ++/// # let errcode = 0u8; ++/// # let condition = true; ++/// if !condition { ++/// bail!("error code: {}", errcode); ++/// } ++/// # Ok(()) ++/// # } ++/// ``` ++/// ++/// See documentation for `bail!` macro for further details. ++#[macro_export] ++macro_rules! ensure { ++ ($cond:expr, $e:expr) => { ++ if !($cond) { ++ bail!($e); ++ } ++ }; ++ ($cond:expr, $fmt:expr, $($arg:tt)+) => { ++ if !($cond) { ++ bail!($fmt, $($arg)+); ++ } ++ }; ++} ++ ++#[doc(hidden)] ++pub mod mock { ++ error_chain!{} ++} diff --cc vendor/error-chain-0.11.0-rc.2/src/quick_error.rs index 000000000,000000000..c38f50c8f new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/src/quick_error.rs @@@ -1,0 -1,0 +1,533 @@@ ++// From https://github.com/tailhook/quick-error ++// Changes: ++// - replace `impl Error` by `impl Item::description` ++// - $imeta ++ ++#[macro_export] ++macro_rules! quick_error { ++ ( $(#[$meta:meta])* ++ pub enum $name:ident { $($chunks:tt)* } ++ ) => { ++ quick_error!(SORT [pub enum $name $(#[$meta])* ] ++ items [] buf [] ++ queue [ $($chunks)* ]); ++ }; ++ ( $(#[$meta:meta])* ++ enum $name:ident { $($chunks:tt)* } ++ ) => { ++ quick_error!(SORT [enum $name $(#[$meta])* ] ++ items [] buf [] ++ queue [ $($chunks)* ]); ++ }; ++ // Queue is empty, can do the work ++ (SORT [enum $name:ident $( #[$meta:meta] )*] ++ items [$($( #[$imeta:meta] )* ++ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] ++ {$( $ifuncs:tt )*} )* ] ++ buf [ ] ++ queue [ ] ++ ) => { ++ quick_error!(ENUM_DEFINITION [enum $name $( #[$meta] )*] ++ body [] ++ queue [$($( #[$imeta] )* ++ => $iitem: $imode [$( $ivar: $ityp ),*] )*] ++ ); ++ quick_error!(IMPLEMENTATIONS $name {$( ++ $iitem: $imode [$(#[$imeta])*] [$( $ivar: $ityp ),*] {$( $ifuncs )*} ++ )*}); ++ $( ++ quick_error!(ERROR_CHECK $imode $($ifuncs)*); ++ )* ++ }; ++ (SORT [pub enum $name:ident $( #[$meta:meta] )*] ++ items [$($( #[$imeta:meta] )* ++ => 
$iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] ++ {$( $ifuncs:tt )*} )* ] ++ buf [ ] ++ queue [ ] ++ ) => { ++ quick_error!(ENUM_DEFINITION [pub enum $name $( #[$meta] )*] ++ body [] ++ queue [$($( #[$imeta] )* ++ => $iitem: $imode [$( $ivar: $ityp ),*] )*] ++ ); ++ quick_error!(IMPLEMENTATIONS $name {$( ++ $iitem: $imode [$(#[$imeta])*] [$( $ivar: $ityp ),*] {$( $ifuncs )*} ++ )*}); ++ $( ++ quick_error!(ERROR_CHECK $imode $($ifuncs)*); ++ )* ++ }; ++ // Add meta to buffer ++ (SORT [$( $def:tt )*] ++ items [$($( #[$imeta:meta] )* ++ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] ++ {$( $ifuncs:tt )*} )* ] ++ buf [$( #[$bmeta:meta] )*] ++ queue [ #[$qmeta:meta] $( $tail:tt )*] ++ ) => { ++ quick_error!(SORT [$( $def )*] ++ items [$( $(#[$imeta])* => $iitem: $imode [$( $ivar:$ityp ),*] {$( $ifuncs )*} )*] ++ buf [$( #[$bmeta] )* #[$qmeta] ] ++ queue [$( $tail )*]); ++ }; ++ // Add ident to buffer ++ (SORT [$( $def:tt )*] ++ items [$($( #[$imeta:meta] )* ++ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] ++ {$( $ifuncs:tt )*} )* ] ++ buf [$( #[$bmeta:meta] )*] ++ queue [ $qitem:ident $( $tail:tt )*] ++ ) => { ++ quick_error!(SORT [$( $def )*] ++ items [$( $(#[$imeta])* ++ => $iitem: $imode [$( $ivar:$ityp ),*] {$( $ifuncs )*} )*] ++ buf [$(#[$bmeta])* => $qitem : UNIT [ ] ] ++ queue [$( $tail )*]); ++ }; ++ // Flush buffer on meta after ident ++ (SORT [$( $def:tt )*] ++ items [$($( #[$imeta:meta] )* ++ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] ++ {$( $ifuncs:tt )*} )* ] ++ buf [$( #[$bmeta:meta] )* ++ => $bitem:ident: $bmode:tt [$( $bvar:ident: $btyp:ty ),*] ] ++ queue [ #[$qmeta:meta] $( $tail:tt )*] ++ ) => { ++ quick_error!(SORT [$( $def )*] ++ enum [$( $(#[$emeta])* => $eitem $(( $($etyp),* ))* )* ++ $(#[$bmeta])* => $bitem: $bmode $(( $($btyp),* ))*] ++ items [$($( #[$imeta:meta] )* ++ => $iitem: $imode [$( $ivar:$ityp ),*] {$( $ifuncs )*} )* ++ $bitem: $bmode [$( $bvar:$btyp ),*] {} ] ++ buf [ #[$qmeta] ] ++ queue 
[$( $tail )*]); ++ }; ++ // Add tuple enum-variant ++ (SORT [$( $def:tt )*] ++ items [$($( #[$imeta:meta] )* ++ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] ++ {$( $ifuncs:tt )*} )* ] ++ buf [$( #[$bmeta:meta] )* => $bitem:ident: UNIT [ ] ] ++ queue [($( $qvar:ident: $qtyp:ty ),+) $( $tail:tt )*] ++ ) => { ++ quick_error!(SORT [$( $def )*] ++ items [$( $(#[$imeta])* => $iitem: $imode [$( $ivar:$ityp ),*] {$( $ifuncs )*} )*] ++ buf [$( #[$bmeta] )* => $bitem: TUPLE [$( $qvar:$qtyp ),*] ] ++ queue [$( $tail )*] ++ ); ++ }; ++ // Add struct enum-variant - e.g. { descr: &'static str } ++ (SORT [$( $def:tt )*] ++ items [$($( #[$imeta:meta] )* ++ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] ++ {$( $ifuncs:tt )*} )* ] ++ buf [$( #[$bmeta:meta] )* => $bitem:ident: UNIT [ ] ] ++ queue [{ $( $qvar:ident: $qtyp:ty ),+} $( $tail:tt )*] ++ ) => { ++ quick_error!(SORT [$( $def )*] ++ items [$( $(#[$imeta])* => $iitem: $imode [$( $ivar:$ityp ),*] {$( $ifuncs )*} )*] ++ buf [$( #[$bmeta] )* => $bitem: STRUCT [$( $qvar:$qtyp ),*] ] ++ queue [$( $tail )*]); ++ }; ++ // Add struct enum-variant, with excess comma - e.g. 
{ descr: &'static str, } ++ (SORT [$( $def:tt )*] ++ items [$($( #[$imeta:meta] )* ++ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] ++ {$( $ifuncs:tt )*} )* ] ++ buf [$( #[$bmeta:meta] )* => $bitem:ident: UNIT [ ] ] ++ queue [{$( $qvar:ident: $qtyp:ty ),+ ,} $( $tail:tt )*] ++ ) => { ++ quick_error!(SORT [$( $def )*] ++ items [$( $(#[$imeta])* => $iitem: $imode [$( $ivar:$ityp ),*] {$( $ifuncs )*} )*] ++ buf [$( #[$bmeta] )* => $bitem: STRUCT [$( $qvar:$qtyp ),*] ] ++ queue [$( $tail )*]); ++ }; ++ // Add braces and flush always on braces ++ (SORT [$( $def:tt )*] ++ items [$($( #[$imeta:meta] )* ++ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] ++ {$( $ifuncs:tt )*} )* ] ++ buf [$( #[$bmeta:meta] )* ++ => $bitem:ident: $bmode:tt [$( $bvar:ident: $btyp:ty ),*] ] ++ queue [ {$( $qfuncs:tt )*} $( $tail:tt )*] ++ ) => { ++ quick_error!(SORT [$( $def )*] ++ items [$( $(#[$imeta])* => $iitem: $imode [$( $ivar:$ityp ),*] {$( $ifuncs )*} )* ++ $(#[$bmeta])* => $bitem: $bmode [$( $bvar:$btyp ),*] {$( $qfuncs )*} ] ++ buf [ ] ++ queue [$( $tail )*]); ++ }; ++ // Flush buffer on double ident ++ (SORT [$( $def:tt )*] ++ items [$($( #[$imeta:meta] )* ++ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] ++ {$( $ifuncs:tt )*} )* ] ++ buf [$( #[$bmeta:meta] )* ++ => $bitem:ident: $bmode:tt [$( $bvar:ident: $btyp:ty ),*] ] ++ queue [ $qitem:ident $( $tail:tt )*] ++ ) => { ++ quick_error!(SORT [$( $def )*] ++ items [$( $(#[$imeta])* => $iitem: $imode [$( $ivar:$ityp ),*] {$( $ifuncs )*} )* ++ $(#[$bmeta])* => $bitem: $bmode [$( $bvar:$btyp ),*] {} ] ++ buf [ => $qitem : UNIT [ ] ] ++ queue [$( $tail )*]); ++ }; ++ // Flush buffer on end ++ (SORT [$( $def:tt )*] ++ items [$($( #[$imeta:meta] )* ++ => $iitem:ident: $imode:tt [$( $ivar:ident: $ityp:ty ),*] ++ {$( $ifuncs:tt )*} )* ] ++ buf [$( #[$bmeta:meta] )* ++ => $bitem:ident: $bmode:tt [$( $bvar:ident: $btyp:ty ),*] ] ++ queue [ ] ++ ) => { ++ quick_error!(SORT [$( $def )*] ++ items [$( $(#[$imeta])* 
=> $iitem: $imode [$( $ivar:$ityp ),*] {$( $ifuncs )*} )* ++ $(#[$bmeta])* => $bitem: $bmode [$( $bvar:$btyp ),*] {} ] ++ buf [ ] ++ queue [ ]); ++ }; ++ // Public enum (Queue Empty) ++ (ENUM_DEFINITION [pub enum $name:ident $( #[$meta:meta] )*] ++ body [$($( #[$imeta:meta] )* ++ => $iitem:ident ($(($( $ttyp:ty ),+))*) {$({$( $svar:ident: $styp:ty ),*})*} )* ] ++ queue [ ] ++ ) => { ++ $(#[$meta])* ++ pub enum $name { ++ $( ++ $(#[$imeta])* ++ $iitem $(($( $ttyp ),*))* $({$( $svar: $styp ),*})*, ++ )* ++ } ++ }; ++ // Private enum (Queue Empty) ++ (ENUM_DEFINITION [enum $name:ident $( #[$meta:meta] )*] ++ body [$($( #[$imeta:meta] )* ++ => $iitem:ident ($(($( $ttyp:ty ),+))*) {$({$( $svar:ident: $styp:ty ),*})*} )* ] ++ queue [ ] ++ ) => { ++ $(#[$meta])* ++ enum $name { ++ $( ++ $(#[$imeta])* ++ $iitem $(($( $ttyp ),*))* $({$( $svar: $styp ),*})*, ++ )* ++ } ++ }; ++ // Unit variant ++ (ENUM_DEFINITION [$( $def:tt )*] ++ body [$($( #[$imeta:meta] )* ++ => $iitem:ident ($(($( $ttyp:ty ),+))*) {$({$( $svar:ident: $styp:ty ),*})*} )* ] ++ queue [$( #[$qmeta:meta] )* ++ => $qitem:ident: UNIT [ ] $( $queue:tt )*] ++ ) => { ++ quick_error!(ENUM_DEFINITION [ $($def)* ] ++ body [$($( #[$imeta] )* => $iitem ($(($( $ttyp ),+))*) {$({$( $svar: $styp ),*})*} )* ++ $( #[$qmeta] )* => $qitem () {} ] ++ queue [ $($queue)* ] ++ ); ++ }; ++ // Tuple variant ++ (ENUM_DEFINITION [$( $def:tt )*] ++ body [$($( #[$imeta:meta] )* ++ => $iitem:ident ($(($( $ttyp:ty ),+))*) {$({$( $svar:ident: $styp:ty ),*})*} )* ] ++ queue [$( #[$qmeta:meta] )* ++ => $qitem:ident: TUPLE [$( $qvar:ident: $qtyp:ty ),+] $( $queue:tt )*] ++ ) => { ++ quick_error!(ENUM_DEFINITION [ $($def)* ] ++ body [$($( #[$imeta] )* => $iitem ($(($( $ttyp ),+))*) {$({$( $svar: $styp ),*})*} )* ++ $( #[$qmeta] )* => $qitem (($( $qtyp ),*)) {} ] ++ queue [ $($queue)* ] ++ ); ++ }; ++ // Struct variant ++ (ENUM_DEFINITION [$( $def:tt )*] ++ body [$($( #[$imeta:meta] )* ++ => $iitem:ident ($(($( $ttyp:ty ),+))*) {$({$( 
$svar:ident: $styp:ty ),*})*} )* ] ++ queue [$( #[$qmeta:meta] )* ++ => $qitem:ident: STRUCT [$( $qvar:ident: $qtyp:ty ),*] $( $queue:tt )*] ++ ) => { ++ quick_error!(ENUM_DEFINITION [ $($def)* ] ++ body [$($( #[$imeta] )* => $iitem ($(($( $ttyp ),+))*) {$({$( $svar: $styp ),*})*} )* ++ $( #[$qmeta] )* => $qitem () {{$( $qvar: $qtyp ),*}} ] ++ queue [ $($queue)* ] ++ ); ++ }; ++ (IMPLEMENTATIONS ++ $name:ident {$( ++ $item:ident: $imode:tt [$(#[$imeta:meta])*] [$( $var:ident: $typ:ty ),*] {$( $funcs:tt )*} ++ )*} ++ ) => { ++ #[allow(unused, unused_doc_comment)] ++ impl ::std::fmt::Display for $name { ++ fn fmt(&self, fmt: &mut ::std::fmt::Formatter) ++ -> ::std::fmt::Result ++ { ++ match *self { ++ $( ++ $(#[$imeta])* ++ quick_error!(ITEM_PATTERN ++ $name $item: $imode [$( ref $var ),*] ++ ) => { ++ let display_fn = quick_error!(FIND_DISPLAY_IMPL ++ $name $item: $imode ++ {$( $funcs )*}); ++ ++ display_fn(self, fmt) ++ } ++ )* ++ } ++ } ++ } ++ /*#[allow(unused)] ++ impl ::std::error::Error for $name { ++ fn description(&self) -> &str { ++ match *self { ++ $( ++ quick_error!(ITEM_PATTERN ++ $name $item: $imode [$( ref $var ),*] ++ ) => { ++ quick_error!(FIND_DESCRIPTION_IMPL ++ $item: $imode self fmt [$( $var ),*] ++ {$( $funcs )*}) ++ } ++ )* ++ } ++ } ++ fn cause(&self) -> Option<&::std::error::Error> { ++ match *self { ++ $( ++ quick_error!(ITEM_PATTERN ++ $name $item: $imode [$( ref $var ),*] ++ ) => { ++ quick_error!(FIND_CAUSE_IMPL ++ $item: $imode [$( $var ),*] ++ {$( $funcs )*}) ++ } ++ )* ++ } ++ } ++ }*/ ++ #[allow(unused, unused_doc_comment)] ++ impl $name { ++ /// A string describing the error kind. 
++ pub fn description(&self) -> &str { ++ match *self { ++ $( ++ $(#[$imeta])* ++ quick_error!(ITEM_PATTERN ++ $name $item: $imode [$( ref $var ),*] ++ ) => { ++ quick_error!(FIND_DESCRIPTION_IMPL ++ $item: $imode self fmt [$( $var ),*] ++ {$( $funcs )*}) ++ } ++ )* ++ } ++ } ++ } ++ $( ++ quick_error!(FIND_FROM_IMPL ++ $name $item: $imode [$( $var:$typ ),*] ++ {$( $funcs )*}); ++ )* ++ }; ++ (FIND_DISPLAY_IMPL $name:ident $item:ident: $imode:tt ++ { display($self_:tt) -> ($( $exprs:tt )*) $( $tail:tt )*} ++ ) => { ++ |quick_error!(IDENT $self_): &$name, f: &mut ::std::fmt::Formatter| { ++ write!(f, $( $exprs )*) ++ } ++ }; ++ (FIND_DISPLAY_IMPL $name:ident $item:ident: $imode:tt ++ { display($pattern:expr) $( $tail:tt )*} ++ ) => { ++ |_, f: &mut ::std::fmt::Formatter| { write!(f, $pattern) } ++ }; ++ (FIND_DISPLAY_IMPL $name:ident $item:ident: $imode:tt ++ { display($pattern:expr, $( $exprs:tt )*) $( $tail:tt )*} ++ ) => { ++ |_, f: &mut ::std::fmt::Formatter| { write!(f, $pattern, $( $exprs )*) } ++ }; ++ (FIND_DISPLAY_IMPL $name:ident $item:ident: $imode:tt ++ { $t:tt $( $tail:tt )*} ++ ) => { ++ quick_error!(FIND_DISPLAY_IMPL ++ $name $item: $imode ++ {$( $tail )*}) ++ }; ++ (FIND_DISPLAY_IMPL $name:ident $item:ident: $imode:tt ++ { } ++ ) => { ++ |self_: &$name, f: &mut ::std::fmt::Formatter| { ++ write!(f, "{}", self_.description()) ++ } ++ }; ++ (FIND_DESCRIPTION_IMPL $item:ident: $imode:tt $me:ident $fmt:ident ++ [$( $var:ident ),*] ++ { description($expr:expr) $( $tail:tt )*} ++ ) => { ++ $expr ++ }; ++ (FIND_DESCRIPTION_IMPL $item:ident: $imode:tt $me:ident $fmt:ident ++ [$( $var:ident ),*] ++ { $t:tt $( $tail:tt )*} ++ ) => { ++ quick_error!(FIND_DESCRIPTION_IMPL ++ $item: $imode $me $fmt [$( $var ),*] ++ {$( $tail )*}) ++ }; ++ (FIND_DESCRIPTION_IMPL $item:ident: $imode:tt $me:ident $fmt:ident ++ [$( $var:ident ),*] ++ { } ++ ) => { ++ stringify!($item) ++ }; ++ (FIND_CAUSE_IMPL $item:ident: $imode:tt ++ [$( $var:ident ),*] ++ { cause($expr:expr) $( 
$tail:tt )*} ++ ) => { ++ Some($expr) ++ }; ++ (FIND_CAUSE_IMPL $item:ident: $imode:tt ++ [$( $var:ident ),*] ++ { $t:tt $( $tail:tt )*} ++ ) => { ++ quick_error!(FIND_CAUSE_IMPL ++ $item: $imode [$( $var ),*] ++ { $($tail)* }) ++ }; ++ (FIND_CAUSE_IMPL $item:ident: $imode:tt ++ [$( $var:ident ),*] ++ { } ++ ) => { ++ None ++ }; ++ (FIND_FROM_IMPL $name:ident $item:ident: $imode:tt ++ [$( $var:ident: $typ:ty ),*] ++ { from() $( $tail:tt )*} ++ ) => { ++ $( ++ impl From<$typ> for $name { ++ fn from($var: $typ) -> $name { ++ $name::$item($var) ++ } ++ } ++ )* ++ quick_error!(FIND_FROM_IMPL ++ $name $item: $imode [$( $var:$typ ),*] ++ {$( $tail )*}); ++ }; ++ (FIND_FROM_IMPL $name:ident $item:ident: UNIT ++ [ ] ++ { from($ftyp:ty) $( $tail:tt )*} ++ ) => { ++ impl From<$ftyp> for $name { ++ fn from(_discarded_error: $ftyp) -> $name { ++ $name::$item ++ } ++ } ++ quick_error!(FIND_FROM_IMPL ++ $name $item: UNIT [ ] ++ {$( $tail )*}); ++ }; ++ (FIND_FROM_IMPL $name:ident $item:ident: TUPLE ++ [$( $var:ident: $typ:ty ),*] ++ { from($fvar:ident: $ftyp:ty) -> ($( $texpr:expr ),*) $( $tail:tt )*} ++ ) => { ++ impl From<$ftyp> for $name { ++ fn from($fvar: $ftyp) -> $name { ++ $name::$item($( $texpr ),*) ++ } ++ } ++ quick_error!(FIND_FROM_IMPL ++ $name $item: TUPLE [$( $var:$typ ),*] ++ { $($tail)* }); ++ }; ++ (FIND_FROM_IMPL $name:ident $item:ident: STRUCT ++ [$( $var:ident: $typ:ty ),*] ++ { from($fvar:ident: $ftyp:ty) -> {$( $tvar:ident: $texpr:expr ),*} $( $tail:tt )*} ++ ) => { ++ impl From<$ftyp> for $name { ++ fn from($fvar: $ftyp) -> $name { ++ $name::$item { ++ $( $tvar: $texpr ),* ++ } ++ } ++ } ++ quick_error!(FIND_FROM_IMPL ++ $name $item: STRUCT [$( $var:$typ ),*] ++ { $($tail)* }); ++ }; ++ (FIND_FROM_IMPL $name:ident $item:ident: $imode:tt ++ [$( $var:ident: $typ:ty ),*] ++ { $t:tt $( $tail:tt )*} ++ ) => { ++ quick_error!(FIND_FROM_IMPL ++ $name $item: $imode [$( $var:$typ ),*] ++ {$( $tail )*} ++ ); ++ }; ++ (FIND_FROM_IMPL $name:ident $item:ident: 
$imode:tt ++ [$( $var:ident: $typ:ty ),*] ++ { } ++ ) => { ++ }; ++ (ITEM_BODY $(#[$imeta:meta])* $item:ident: UNIT ++ ) => { }; ++ (ITEM_BODY $(#[$imeta:meta])* $item:ident: TUPLE ++ [$( $typ:ty ),*] ++ ) => { ++ ($( $typ ),*) ++ }; ++ (ITEM_BODY $(#[$imeta:meta])* $item:ident: STRUCT ++ [$( $var:ident: $typ:ty ),*] ++ ) => { ++ {$( $var:$typ ),*} ++ }; ++ (ITEM_PATTERN $name:ident $item:ident: UNIT [] ++ ) => { ++ $name::$item ++ }; ++ (ITEM_PATTERN $name:ident $item:ident: TUPLE ++ [$( ref $var:ident ),*] ++ ) => { ++ $name::$item ($( ref $var ),*) ++ }; ++ (ITEM_PATTERN $name:ident $item:ident: STRUCT ++ [$( ref $var:ident ),*] ++ ) => { ++ $name::$item {$( ref $var ),*} ++ }; ++ // This one should match all allowed sequences in "funcs" but not match ++ // anything else. ++ // This is to contrast FIND_* clauses which just find stuff they need and ++ // skip everything else completely ++ (ERROR_CHECK $imode:tt display($self_:tt) -> ($( $exprs:tt )*) $( $tail:tt )*) ++ => { quick_error!(ERROR_CHECK_COMMA $imode $($tail)*); }; ++ (ERROR_CHECK $imode:tt display($pattern: expr) $( $tail:tt )*) ++ => { quick_error!(ERROR_CHECK_COMMA $imode $($tail)*); }; ++ (ERROR_CHECK $imode:tt display($pattern: expr, $( $exprs:tt )*) $( $tail:tt )*) ++ => { quick_error!(ERROR_CHECK_COMMA $imode $($tail)*); }; ++ (ERROR_CHECK $imode:tt description($expr:expr) $( $tail:tt )*) ++ => { quick_error!(ERROR_CHECK_COMMA $imode $($tail)*); }; ++ (ERROR_CHECK $imode:tt cause($expr:expr) $($tail:tt)*) ++ => { quick_error!(ERROR_CHECK_COMMA $imode $($tail)*); }; ++ (ERROR_CHECK $imode:tt from() $($tail:tt)*) ++ => { quick_error!(ERROR_CHECK_COMMA $imode $($tail)*); }; ++ (ERROR_CHECK $imode:tt from($ftyp:ty) $($tail:tt)*) ++ => { quick_error!(ERROR_CHECK_COMMA $imode $($tail)*); }; ++ (ERROR_CHECK TUPLE from($fvar:ident: $ftyp:ty) -> ($( $e:expr ),*) $( $tail:tt )*) ++ => { quick_error!(ERROR_CHECK_COMMA TUPLE $($tail)*); }; ++ (ERROR_CHECK STRUCT from($fvar:ident: $ftyp:ty) -> {$( $v:ident: 
$e:expr ),*} $( $tail:tt )*) ++ => { quick_error!(ERROR_CHECK_COMMA STRUCT $($tail)*); }; ++ (ERROR_CHECK $imode:tt ) => {}; ++ (ERROR_CHECK_COMMA $imode:tt , $( $tail:tt )*) ++ => { quick_error!(ERROR_CHECK $imode $($tail)*); }; ++ (ERROR_CHECK_COMMA $imode:tt $( $tail:tt )*) ++ => { quick_error!(ERROR_CHECK $imode $($tail)*); }; ++ // Utility functions ++ (IDENT $ident:ident) => { $ident } ++} diff --cc vendor/error-chain-0.11.0-rc.2/src/quick_main.rs index 000000000,000000000..f81e7d704 new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/src/quick_main.rs @@@ -1,0 -1,0 +1,77 @@@ ++/// Convenient wrapper to be able to use `?` and such in the main. You can ++/// use it with a separated function: ++/// ++/// ``` ++/// # #[macro_use] extern crate error_chain; ++/// # error_chain! {} ++/// # fn main() { ++/// quick_main!(run); ++/// # } ++/// ++/// fn run() -> Result<()> { ++/// Err("error".into()) ++/// } ++/// ``` ++/// ++/// or with a closure: ++/// ++/// ``` ++/// # #[macro_use] extern crate error_chain; ++/// # error_chain! {} ++/// # fn main() { ++/// quick_main!(|| -> Result<()> { ++/// Err("error".into()) ++/// }); ++/// # } ++/// ``` ++/// ++/// You can also set the exit value of the process by returning a type that implements [`ExitCode`](trait.ExitCode.html): ++/// ++/// ``` ++/// # #[macro_use] extern crate error_chain; ++/// # error_chain! {} ++/// # fn main() { ++/// quick_main!(run); ++/// # } ++/// ++/// fn run() -> Result { ++/// Err("error".into()) ++/// } ++/// ``` ++#[macro_export] ++macro_rules! quick_main { ++ ($main:expr) => { ++ fn main() { ++ use ::std::io::Write; ++ ++ ::std::process::exit(match $main() { ++ Ok(ret) => $crate::ExitCode::code(ret), ++ Err(ref e) => { ++ write!(&mut ::std::io::stderr(), "{}", $crate::ChainedError::display_chain(e)) ++ .expect("Error writing to stderr"); ++ ++ 1 ++ } ++ }); ++ } ++ }; ++} ++ ++/// Represents a value that can be used as the exit status of the process. 
++/// See [`quick_main!`](macro.quick_main.html). ++pub trait ExitCode { ++ /// Returns the value to use as the exit status. ++ fn code(self) -> i32; ++} ++ ++impl ExitCode for i32 { ++ fn code(self) -> i32 { ++ self ++ } ++} ++ ++impl ExitCode for () { ++ fn code(self) -> i32 { ++ 0 ++ } ++} diff --cc vendor/error-chain-0.11.0-rc.2/tests/quick_main.rs index 000000000,000000000..4ada3b4e0 new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/tests/quick_main.rs @@@ -1,0 -1,0 +1,28 @@@ ++#![allow(dead_code)] ++#[macro_use] ++extern crate error_chain; ++ ++error_chain!(); ++ ++mod unit { ++ use super::*; ++ quick_main!(run); ++ ++ fn run() -> Result<()> { ++ Ok(()) ++ } ++} ++ ++mod i32 { ++ use super::*; ++ quick_main!(run); ++ ++ fn run() -> Result { ++ Ok(1) ++ } ++} ++ ++mod closure { ++ use super::*; ++ quick_main!(|| -> Result<()> { Ok(()) }); ++} diff --cc vendor/error-chain-0.11.0-rc.2/tests/tests.rs index 000000000,000000000..dc56b369c new file mode 100644 --- /dev/null +++ b/vendor/error-chain-0.11.0-rc.2/tests/tests.rs @@@ -1,0 -1,0 +1,633 @@@ ++#![allow(dead_code)] ++ ++#[macro_use] ++extern crate error_chain; ++ ++#[test] ++fn smoke_test_1() { ++ error_chain! { ++ types { ++ Error, ErrorKind, ResultExt, Result; ++ } ++ ++ links { } ++ ++ foreign_links { } ++ ++ errors { } ++ }; ++} ++ ++#[test] ++fn smoke_test_2() { ++ error_chain! { ++ types { } ++ ++ links { } ++ ++ foreign_links { } ++ ++ errors { } ++ }; ++} ++ ++#[test] ++fn smoke_test_3() { ++ error_chain! { ++ links { } ++ ++ foreign_links { } ++ ++ errors { } ++ }; ++} ++ ++#[test] ++fn smoke_test_4() { ++ error_chain! { ++ links { } ++ ++ foreign_links { } ++ ++ errors { ++ HttpStatus(e: u32) { ++ description("http request returned an unsuccessful status code") ++ display("http request returned an unsuccessful status code: {}", e) ++ } ++ } ++ }; ++} ++ ++#[test] ++fn smoke_test_5() { ++ error_chain! 
{ ++ types { } ++ ++ links { } ++ ++ foreign_links { } ++ ++ errors { ++ HttpStatus(e: u32) { ++ description("http request returned an unsuccessful status code") ++ display("http request returned an unsuccessful status code: {}", e) ++ } ++ } ++ }; ++} ++ ++#[test] ++fn smoke_test_6() { ++ error_chain! { ++ errors { ++ HttpStatus(e: u32) { ++ description("http request returned an unsuccessful status code") ++ display("http request returned an unsuccessful status code: {}", e) ++ } ++ } ++ }; ++} ++ ++#[test] ++fn smoke_test_7() { ++ error_chain! { ++ types { } ++ ++ foreign_links { } ++ ++ errors { ++ HttpStatus(e: u32) { ++ description("http request returned an unsuccessful status code") ++ display("http request returned an unsuccessful status code: {}", e) ++ } ++ } ++ }; ++} ++ ++#[test] ++fn smoke_test_8() { ++ error_chain! { ++ types { } ++ ++ links { } ++ links { } ++ ++ foreign_links { } ++ foreign_links { } ++ ++ errors { ++ FileNotFound ++ AccessDenied ++ } ++ }; ++} ++ ++#[test] ++fn order_test_1() { ++ error_chain! { types { } links { } foreign_links { } errors { } }; ++} ++ ++#[test] ++fn order_test_2() { ++ error_chain! { links { } types { } foreign_links { } errors { } }; ++} ++ ++#[test] ++fn order_test_3() { ++ error_chain! { foreign_links { } links { } errors { } types { } }; ++} ++ ++#[test] ++fn order_test_4() { ++ error_chain! { errors { } types { } foreign_links { } }; ++} ++ ++#[test] ++fn order_test_5() { ++ error_chain! { foreign_links { } types { } }; ++} ++ ++#[test] ++fn order_test_6() { ++ error_chain! { ++ links { } ++ ++ errors { ++ HttpStatus(e: u32) { ++ description("http request returned an unsuccessful status code") ++ display("http request returned an unsuccessful status code: {}", e) ++ } ++ } ++ ++ ++ foreign_links { } ++ }; ++} ++ ++#[test] ++fn order_test_7() { ++ error_chain! 
{ ++ links { } ++ ++ foreign_links { } ++ ++ types { ++ Error, ErrorKind, ResultExt, Result; ++ } ++ }; ++} ++ ++ ++#[test] ++fn order_test_8() { ++ error_chain! { ++ links { } ++ ++ foreign_links { } ++ foreign_links { } ++ ++ types { ++ Error, ErrorKind, ResultExt, Result; ++ } ++ }; ++} ++ ++#[test] ++fn empty() { ++ error_chain!{}; ++} ++ ++#[test] ++#[cfg(feature = "backtrace")] ++fn has_backtrace_depending_on_env() { ++ use std::env; ++ ++ error_chain! { ++ types {} ++ links {} ++ foreign_links {} ++ errors { ++ MyError ++ } ++ } ++ ++ let original_value = env::var_os("RUST_BACKTRACE"); ++ ++ // missing RUST_BACKTRACE and RUST_BACKTRACE=0 ++ env::remove_var("RUST_BACKTRACE"); ++ let err = Error::from(ErrorKind::MyError); ++ assert!(err.backtrace().is_none()); ++ env::set_var("RUST_BACKTRACE", "0"); ++ let err = Error::from(ErrorKind::MyError); ++ assert!(err.backtrace().is_none()); ++ ++ // RUST_BACKTRACE set to anything but 0 ++ env::set_var("RUST_BACKTRACE", "yes"); ++ let err = Error::from(ErrorKind::MyError); ++ assert!(err.backtrace().is_some()); ++ ++ if let Some(var) = original_value { ++ env::set_var("RUST_BACKTRACE", var); ++ } ++} ++ ++#[test] ++fn chain_err() { ++ use std::fmt; ++ ++ error_chain! { ++ foreign_links { ++ Fmt(fmt::Error); ++ } ++ errors { ++ Test ++ } ++ } ++ ++ let _: Result<()> = Err(fmt::Error).chain_err(|| ""); ++ let _: Result<()> = Err(Error::from_kind(ErrorKind::Test)).chain_err(|| ""); ++} ++ ++/// Verify that an error chain is extended one by `Error::chain_err`, with ++/// the new error added to the end. ++#[test] ++fn error_chain_err() { ++ error_chain! { ++ errors { ++ Test ++ } ++ } ++ ++ let base = Error::from(ErrorKind::Test); ++ let ext = base.chain_err(|| "Test passes"); ++ ++ if let Error(ErrorKind::Msg(_), _) = ext { ++ // pass ++ } else { ++ panic!("The error should be wrapped. {:?}", ext); ++ } ++} ++ ++#[test] ++fn links() { ++ mod test { ++ error_chain!{} ++ } ++ ++ error_chain! 
{ ++ links { ++ Test(test::Error, test::ErrorKind); ++ } ++ } ++} ++ ++#[cfg(test)] ++mod foreign_link_test { ++ ++ use std::fmt; ++ ++ // Note: foreign errors must be `pub` because they appear in the ++ // signature of the public foreign_link_error_path ++ #[derive(Debug)] ++ pub struct ForeignError { ++ cause: ForeignErrorCause, ++ } ++ ++ impl ::std::error::Error for ForeignError { ++ fn description(&self) -> &'static str { ++ "Foreign error description" ++ } ++ ++ fn cause(&self) -> Option<&::std::error::Error> { ++ Some(&self.cause) ++ } ++ } ++ ++ impl fmt::Display for ForeignError { ++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { ++ write!(formatter, "Foreign error display") ++ } ++ } ++ ++ #[derive(Debug)] ++ pub struct ForeignErrorCause {} ++ ++ impl ::std::error::Error for ForeignErrorCause { ++ fn description(&self) -> &'static str { ++ "Foreign error cause description" ++ } ++ ++ fn cause(&self) -> Option<&::std::error::Error> { ++ None ++ } ++ } ++ ++ impl fmt::Display for ForeignErrorCause { ++ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { ++ write!(formatter, "Foreign error cause display") ++ } ++ } ++ ++ error_chain! 
{ ++ types{ ++ Error, ErrorKind, ResultExt, Result; ++ } ++ links {} ++ foreign_links { ++ Foreign(ForeignError); ++ Io(::std::io::Error); ++ } ++ errors {} ++ } ++ ++ #[test] ++ fn display_underlying_error() { ++ let chained_error = try_foreign_error().err().unwrap(); ++ assert_eq!(format!("{}", ForeignError { cause: ForeignErrorCause {} }), ++ format!("{}", chained_error)); ++ } ++ ++ #[test] ++ fn finds_cause() { ++ let chained_error = try_foreign_error().err().unwrap(); ++ assert_eq!(format!("{}", ForeignErrorCause {}), ++ format!("{}", ::std::error::Error::cause(&chained_error).unwrap())); ++ } ++ ++ #[test] ++ fn iterates() { ++ let chained_error = try_foreign_error().err().unwrap(); ++ let mut error_iter = chained_error.iter(); ++ assert!(!format!("{:?}", error_iter).is_empty()); ++ assert_eq!(format!("{}", ForeignError { cause: ForeignErrorCause {} }), ++ format!("{}", error_iter.next().unwrap())); ++ assert_eq!(format!("{}", ForeignErrorCause {}), ++ format!("{}", error_iter.next().unwrap())); ++ assert_eq!(format!("{:?}", None as Option<&::std::error::Error>), ++ format!("{:?}", error_iter.next())); ++ } ++ ++ fn try_foreign_error() -> Result<()> { ++ Err(ForeignError { cause: ForeignErrorCause {} })?; ++ Ok(()) ++ } ++} ++ ++#[cfg(test)] ++mod attributes_test { ++ #[allow(unused_imports)] ++ use std::io; ++ ++ #[cfg(not(test))] ++ mod inner { ++ error_chain!{} ++ } ++ ++ error_chain! { ++ types { ++ Error, ErrorKind, ResultExt, Result; ++ } ++ ++ links { ++ Inner(inner::Error, inner::ErrorKind) #[cfg(not(test))]; ++ } ++ ++ foreign_links { ++ Io(io::Error) #[cfg(not(test))]; ++ } ++ ++ errors { ++ #[cfg(not(test))] ++ AnError { ++ ++ } ++ } ++ } ++} ++ ++#[test] ++fn with_result() { ++ error_chain! { ++ types { ++ Error, ErrorKind, ResultExt, Result; ++ } ++ } ++ let _: Result<()> = Ok(()); ++} ++ ++#[test] ++fn without_result() { ++ error_chain! 
{ ++ types { ++ Error, ErrorKind, ResultExt; ++ } ++ } ++ let _: Result<(), ()> = Ok(()); ++} ++ ++#[test] ++fn documentation() { ++ mod inner { ++ error_chain!{} ++ } ++ ++ error_chain! { ++ links { ++ Inner(inner::Error, inner::ErrorKind) #[doc = "Doc"]; ++ } ++ foreign_links { ++ Io(::std::io::Error) #[doc = "Doc"]; ++ } ++ errors { ++ /// Doc ++ Variant ++ } ++ } ++} ++ ++#[cfg(test)] ++mod multiple_error_same_mod { ++ error_chain! { ++ types { ++ MyError, MyErrorKind, MyResultExt, MyResult; ++ } ++ } ++ error_chain!{} ++} ++ ++#[doc(test)] ++#[deny(dead_code)] ++mod allow_dead_code { ++ error_chain!{} ++} ++ ++// Make sure links actually work! ++#[test] ++fn rustup_regression() { ++ error_chain! { ++ links { ++ Download(error_chain::mock::Error, error_chain::mock::ErrorKind); ++ } ++ ++ foreign_links { } ++ ++ errors { ++ LocatingWorkingDir { ++ description("could not locate working directory") ++ } ++ } ++ } ++} ++ ++#[test] ++fn error_patterns() { ++ error_chain! { ++ links { } ++ ++ foreign_links { } ++ ++ errors { } ++ } ++ ++ // Tuples look nice when matching errors ++ match Error::from("Test") { ++ Error(ErrorKind::Msg(_), _) => {} ++ } ++} ++ ++#[test] ++fn error_first() { ++ error_chain! { ++ errors { ++ LocatingWorkingDir { ++ description("could not locate working directory") ++ } ++ } ++ ++ links { ++ Download(error_chain::mock::Error, error_chain::mock::ErrorKind); ++ } ++ ++ foreign_links { } ++ } ++} ++ ++#[test] ++fn bail() { ++ error_chain! { ++ errors { Foo } ++ } ++ ++ fn foo() -> Result<()> { ++ bail!(ErrorKind::Foo) ++ } ++ ++ fn bar() -> Result<()> { ++ bail!("bar") ++ } ++ ++ fn baz() -> Result<()> { ++ bail!("{}", "baz") ++ } ++} ++ ++#[test] ++fn ensure() { ++ error_chain! 
{ ++ errors { Bar } ++ } ++ ++ fn foo(x: u8) -> Result<()> { ++ ensure!(x == 42, ErrorKind::Bar); ++ Ok(()) ++ } ++ ++ assert!(foo(42).is_ok()); ++ assert!(foo(0).is_err()); ++} ++ ++/// Since the `types` declaration is a list of symbols, check if we ++/// don't change their meaning or order. ++#[test] ++fn types_declarations() { ++ error_chain! { ++ types { ++ MyError, MyErrorKind, MyResultExt, MyResult; ++ } ++ } ++ ++ MyError::from_kind(MyErrorKind::Msg("".into())); ++ ++ let err: Result<(), ::std::io::Error> = Ok(()); ++ MyResultExt::chain_err(err, || "").unwrap(); ++ ++ let _: MyResult<()> = Ok(()); ++} ++ ++#[test] ++/// Calling chain_err over a `Result` containing an error to get a chained error ++/// and constructing a MyError directly, passing it an error should be equivalent. ++fn rewrapping() { ++ ++ use std::env::VarError::{self, NotPresent, NotUnicode}; ++ ++ error_chain! { ++ foreign_links { ++ VarErr(VarError); ++ } ++ ++ types { ++ MyError, MyErrorKind, MyResultExt, MyResult; ++ } ++ } ++ ++ let result_a_from_func: Result = Err(VarError::NotPresent); ++ let result_b_from_func: Result = Err(VarError::NotPresent); ++ ++ let our_error_a = result_a_from_func.map_err(|e| match e { ++ NotPresent => MyError::with_chain(e, "env var wasn't provided"), ++ NotUnicode(_) => MyError::with_chain(e, "env var was bork文字化ã"), ++ }); ++ ++ let our_error_b = result_b_from_func.or_else(|e| match e { ++ NotPresent => Err(e).chain_err(|| "env var wasn't provided"), ++ NotUnicode(_) => Err(e).chain_err(|| "env var was bork文字化ã"), ++ }); ++ ++ assert_eq!(format!("{}", our_error_a.unwrap_err()), ++ format!("{}", our_error_b.unwrap_err())); ++ ++} ++ ++#[test] ++fn comma_in_errors_impl() { ++ error_chain! 
{ ++ links { } ++ ++ foreign_links { } ++ ++ errors { ++ HttpStatus(e: u32) { ++ description("http request returned an unsuccessful status code"), ++ display("http request returned an unsuccessful status code: {}", e) ++ } ++ } ++ }; ++} ++ ++ ++#[test] ++fn trailing_comma_in_errors_impl() { ++ error_chain! { ++ links { } ++ ++ foreign_links { } ++ ++ errors { ++ HttpStatus(e: u32) { ++ description("http request returned an unsuccessful status code"), ++ display("http request returned an unsuccessful status code: {}", e), ++ } ++ } ++ }; ++} diff --cc vendor/fnv-1.0.5/.cargo-checksum.json index 000000000,000000000..2b2416d71 new file mode 100644 --- /dev/null +++ b/vendor/fnv-1.0.5/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344"} diff --cc vendor/fnv-1.0.5/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/fnv-1.0.5/.cargo-ok diff --cc vendor/fnv-1.0.5/.travis.yml index 000000000,000000000..9c58f03c6 new file mode 100644 --- /dev/null +++ b/vendor/fnv-1.0.5/.travis.yml @@@ -1,0 -1,0 +1,8 @@@ ++language: rust ++rust: ++ - nightly ++ - beta ++ - stable ++ ++notifications: ++ webhooks: http://build.servo.org:54856/travis diff --cc vendor/fnv-1.0.5/Cargo.toml index 000000000,000000000..c40115048 new file mode 100644 --- /dev/null +++ b/vendor/fnv-1.0.5/Cargo.toml @@@ -1,0 -1,0 +1,13 @@@ ++[package] ++name = "fnv" ++version = "1.0.5" ++authors = ["Alex Crichton "] ++description = "Fowler–Noll–Vo hash function" ++license = "Apache-2.0 / MIT" ++readme = "README.md" ++repository = "https://github.com/servo/rust-fnv" ++documentation = "https://doc.servo.org/fnv/" ++ ++[lib] ++name = "fnv" ++path = "lib.rs" diff --cc vendor/fnv-1.0.5/README.md index 000000000,000000000..96001e22d new file mode 100644 --- /dev/null +++ b/vendor/fnv-1.0.5/README.md @@@ -1,0 -1,0 +1,81 @@@ ++# rust-fnv ++ ++An implementation of the [Fowler–Noll–Vo hash 
function][chongo]. ++ ++### [Read the documentation](https://doc.servo.org/fnv/) ++ ++ ++## About ++ ++The FNV hash function is a custom `Hasher` implementation that is more ++efficient for smaller hash keys. ++ ++[The Rust FAQ states that][faq] while the default `Hasher` implementation, ++SipHash, is good in many cases, it is notably slower than other algorithms ++with short keys, such as when you have a map of integers to other values. ++In cases like these, [FNV is demonstrably faster][graphs]. ++ ++Its disadvantages are that it performs badly on larger inputs, and ++provides no protection against collision attacks, where a malicious user ++can craft specific keys designed to slow a hasher down. Thus, it is ++important to profile your program to ensure that you are using small hash ++keys, and be certain that your program could not be exposed to malicious ++inputs (including being a networked server). ++ ++The Rust compiler itself uses FNV, as it is not worried about ++denial-of-service attacks, and can assume that its inputs are going to be ++small—a perfect use case for FNV. ++ ++ ++## Usage ++ ++To include this crate in your program, add the following to your `Cargo.toml`: ++ ++```toml ++[dependencies] ++fnv = "1.0.3" ++``` ++ ++ ++## Using FNV in a HashMap ++ ++The `FnvHashMap` type alias is the easiest way to use the standard library’s ++`HashMap` with FNV. ++ ++```rust ++use fnv::FnvHashMap; ++ ++let mut map = FnvHashMap::default(); ++map.insert(1, "one"); ++map.insert(2, "two"); ++ ++map = FnvHashMap::with_capacity_and_hasher(10, Default::default()); ++map.insert(1, "one"); ++map.insert(2, "two"); ++``` ++ ++Note, the standard library’s `HashMap::new` and `HashMap::with_capacity` ++are only implemented for the `RandomState` hasher, so using `Default` to ++get the hasher is the next best option. ++ ++ ++## Using FNV in a HashSet ++ ++Similarly, `FnvHashSet` is a type alias for the standard library’s `HashSet` ++with FNV. 
++ ++```rust ++use fnv::FnvHashSet; ++ ++let mut set = FnvHashSet::default(); ++set.insert(1); ++set.insert(2); ++ ++set = FnvHashSet::with_capacity_and_hasher(10, Default::default()); ++set.insert(1); ++set.insert(2); ++``` ++ ++[chongo]: http://www.isthe.com/chongo/tech/comp/fnv/index.html ++[faq]: https://www.rust-lang.org/faq.html#why-are-rusts-hashmaps-slow ++[graphs]: http://cglab.ca/~abeinges/blah/hash-rs/ diff --cc vendor/fnv-1.0.5/lib.rs index 000000000,000000000..1fc5d28c2 new file mode 100644 --- /dev/null +++ b/vendor/fnv-1.0.5/lib.rs @@@ -1,0 -1,0 +1,349 @@@ ++//! An implementation of the [Fowler–Noll–Vo hash function][chongo]. ++//! ++//! ## About ++//! ++//! The FNV hash function is a custom `Hasher` implementation that is more ++//! efficient for smaller hash keys. ++//! ++//! [The Rust FAQ states that][faq] while the default `Hasher` implementation, ++//! SipHash, is good in many cases, it is notably slower than other algorithms ++//! with short keys, such as when you have a map of integers to other values. ++//! In cases like these, [FNV is demonstrably faster][graphs]. ++//! ++//! Its disadvantages are that it performs badly on larger inputs, and ++//! provides no protection against collision attacks, where a malicious user ++//! can craft specific keys designed to slow a hasher down. Thus, it is ++//! important to profile your program to ensure that you are using small hash ++//! keys, and be certain that your program could not be exposed to malicious ++//! inputs (including being a networked server). ++//! ++//! The Rust compiler itself uses FNV, as it is not worried about ++//! denial-of-service attacks, and can assume that its inputs are going to be ++//! small—a perfect use case for FNV. ++//! ++//! ++//! ## Using FNV in a `HashMap` ++//! ++//! The `FnvHashMap` type alias is the easiest way to use the standard library’s ++//! `HashMap` with FNV. ++//! ++//! ```rust ++//! use fnv::FnvHashMap; ++//! ++//! 
let mut map = FnvHashMap::default(); ++//! map.insert(1, "one"); ++//! map.insert(2, "two"); ++//! ++//! map = FnvHashMap::with_capacity_and_hasher(10, Default::default()); ++//! map.insert(1, "one"); ++//! map.insert(2, "two"); ++//! ``` ++//! ++//! Note, the standard library’s `HashMap::new` and `HashMap::with_capacity` ++//! are only implemented for the `RandomState` hasher, so using `Default` to ++//! get the hasher is the next best option. ++//! ++//! ## Using FNV in a `HashSet` ++//! ++//! Similarly, `FnvHashSet` is a type alias for the standard library’s `HashSet` ++//! with FNV. ++//! ++//! ```rust ++//! use fnv::FnvHashSet; ++//! ++//! let mut set = FnvHashSet::default(); ++//! set.insert(1); ++//! set.insert(2); ++//! ++//! set = FnvHashSet::with_capacity_and_hasher(10, Default::default()); ++//! set.insert(1); ++//! set.insert(2); ++//! ``` ++//! ++//! [chongo]: http://www.isthe.com/chongo/tech/comp/fnv/index.html ++//! [faq]: https://www.rust-lang.org/faq.html#why-are-rusts-hashmaps-slow ++//! [graphs]: http://cglab.ca/~abeinges/blah/hash-rs/ ++ ++ ++use std::default::Default; ++use std::hash::{Hasher, BuildHasherDefault}; ++use std::collections::{HashMap, HashSet}; ++ ++/// An implementation of the Fowler–Noll–Vo hash function. ++/// ++/// See the [crate documentation](index.html) for more details. ++#[allow(missing_copy_implementations)] ++pub struct FnvHasher(u64); ++ ++impl Default for FnvHasher { ++ ++ #[inline] ++ fn default() -> FnvHasher { ++ FnvHasher(0xcbf29ce484222325) ++ } ++} ++ ++impl FnvHasher { ++ /// Create an FNV hasher starting with a state corresponding ++ /// to the hash `key`. 
++ #[inline] ++ pub fn with_key(key: u64) -> FnvHasher { ++ FnvHasher(key) ++ } ++} ++ ++impl Hasher for FnvHasher { ++ #[inline] ++ fn finish(&self) -> u64 { ++ self.0 ++ } ++ ++ #[inline] ++ fn write(&mut self, bytes: &[u8]) { ++ let FnvHasher(mut hash) = *self; ++ ++ for byte in bytes.iter() { ++ hash = hash ^ (*byte as u64); ++ hash = hash.wrapping_mul(0x100000001b3); ++ } ++ ++ *self = FnvHasher(hash); ++ } ++} ++ ++/// A builder for default FNV hashers. ++pub type FnvBuildHasher = BuildHasherDefault; ++ ++/// A `HashMap` using a default FNV hasher. ++pub type FnvHashMap = HashMap; ++ ++/// A `HashSet` using a default FNV hasher. ++pub type FnvHashSet = HashSet; ++ ++ ++#[cfg(test)] ++mod test { ++ use super::*; ++ use std::hash::Hasher; ++ ++ fn fnv1a(bytes: &[u8]) -> u64 { ++ let mut hasher = FnvHasher::default(); ++ hasher.write(bytes); ++ hasher.finish() ++ } ++ ++ fn repeat_10(bytes: &[u8]) -> Vec { ++ (0..10).flat_map(|_| bytes.iter().cloned()).collect() ++ } ++ ++ fn repeat_500(bytes: &[u8]) -> Vec { ++ (0..500).flat_map(|_| bytes.iter().cloned()).collect() ++ } ++ ++ #[test] ++ fn basic_tests() { ++ assert_eq!(fnv1a(b""), 0xcbf29ce484222325); ++ assert_eq!(fnv1a(b"a"), 0xaf63dc4c8601ec8c); ++ assert_eq!(fnv1a(b"b"), 0xaf63df4c8601f1a5); ++ assert_eq!(fnv1a(b"c"), 0xaf63de4c8601eff2); ++ assert_eq!(fnv1a(b"d"), 0xaf63d94c8601e773); ++ assert_eq!(fnv1a(b"e"), 0xaf63d84c8601e5c0); ++ assert_eq!(fnv1a(b"f"), 0xaf63db4c8601ead9); ++ assert_eq!(fnv1a(b"fo"), 0x08985907b541d342); ++ assert_eq!(fnv1a(b"foo"), 0xdcb27518fed9d577); ++ assert_eq!(fnv1a(b"foob"), 0xdd120e790c2512af); ++ assert_eq!(fnv1a(b"fooba"), 0xcac165afa2fef40a); ++ assert_eq!(fnv1a(b"foobar"), 0x85944171f73967e8); ++ assert_eq!(fnv1a(b"\0"), 0xaf63bd4c8601b7df); ++ assert_eq!(fnv1a(b"a\0"), 0x089be207b544f1e4); ++ assert_eq!(fnv1a(b"b\0"), 0x08a61407b54d9b5f); ++ assert_eq!(fnv1a(b"c\0"), 0x08a2ae07b54ab836); ++ assert_eq!(fnv1a(b"d\0"), 0x0891b007b53c4869); ++ assert_eq!(fnv1a(b"e\0"), 
0x088e4a07b5396540); ++ assert_eq!(fnv1a(b"f\0"), 0x08987c07b5420ebb); ++ assert_eq!(fnv1a(b"fo\0"), 0xdcb28a18fed9f926); ++ assert_eq!(fnv1a(b"foo\0"), 0xdd1270790c25b935); ++ assert_eq!(fnv1a(b"foob\0"), 0xcac146afa2febf5d); ++ assert_eq!(fnv1a(b"fooba\0"), 0x8593d371f738acfe); ++ assert_eq!(fnv1a(b"foobar\0"), 0x34531ca7168b8f38); ++ assert_eq!(fnv1a(b"ch"), 0x08a25607b54a22ae); ++ assert_eq!(fnv1a(b"cho"), 0xf5faf0190cf90df3); ++ assert_eq!(fnv1a(b"chon"), 0xf27397910b3221c7); ++ assert_eq!(fnv1a(b"chong"), 0x2c8c2b76062f22e0); ++ assert_eq!(fnv1a(b"chongo"), 0xe150688c8217b8fd); ++ assert_eq!(fnv1a(b"chongo "), 0xf35a83c10e4f1f87); ++ assert_eq!(fnv1a(b"chongo w"), 0xd1edd10b507344d0); ++ assert_eq!(fnv1a(b"chongo wa"), 0x2a5ee739b3ddb8c3); ++ assert_eq!(fnv1a(b"chongo was"), 0xdcfb970ca1c0d310); ++ assert_eq!(fnv1a(b"chongo was "), 0x4054da76daa6da90); ++ assert_eq!(fnv1a(b"chongo was h"), 0xf70a2ff589861368); ++ assert_eq!(fnv1a(b"chongo was he"), 0x4c628b38aed25f17); ++ assert_eq!(fnv1a(b"chongo was her"), 0x9dd1f6510f78189f); ++ assert_eq!(fnv1a(b"chongo was here"), 0xa3de85bd491270ce); ++ assert_eq!(fnv1a(b"chongo was here!"), 0x858e2fa32a55e61d); ++ assert_eq!(fnv1a(b"chongo was here!\n"), 0x46810940eff5f915); ++ assert_eq!(fnv1a(b"ch\0"), 0xf5fadd190cf8edaa); ++ assert_eq!(fnv1a(b"cho\0"), 0xf273ed910b32b3e9); ++ assert_eq!(fnv1a(b"chon\0"), 0x2c8c5276062f6525); ++ assert_eq!(fnv1a(b"chong\0"), 0xe150b98c821842a0); ++ assert_eq!(fnv1a(b"chongo\0"), 0xf35aa3c10e4f55e7); ++ assert_eq!(fnv1a(b"chongo \0"), 0xd1ed680b50729265); ++ assert_eq!(fnv1a(b"chongo w\0"), 0x2a5f0639b3dded70); ++ assert_eq!(fnv1a(b"chongo wa\0"), 0xdcfbaa0ca1c0f359); ++ assert_eq!(fnv1a(b"chongo was\0"), 0x4054ba76daa6a430); ++ assert_eq!(fnv1a(b"chongo was \0"), 0xf709c7f5898562b0); ++ assert_eq!(fnv1a(b"chongo was h\0"), 0x4c62e638aed2f9b8); ++ assert_eq!(fnv1a(b"chongo was he\0"), 0x9dd1a8510f779415); ++ assert_eq!(fnv1a(b"chongo was her\0"), 0xa3de2abd4911d62d); ++ 
assert_eq!(fnv1a(b"chongo was here\0"), 0x858e0ea32a55ae0a); ++ assert_eq!(fnv1a(b"chongo was here!\0"), 0x46810f40eff60347); ++ assert_eq!(fnv1a(b"chongo was here!\n\0"), 0xc33bce57bef63eaf); ++ assert_eq!(fnv1a(b"cu"), 0x08a24307b54a0265); ++ assert_eq!(fnv1a(b"cur"), 0xf5b9fd190cc18d15); ++ assert_eq!(fnv1a(b"curd"), 0x4c968290ace35703); ++ assert_eq!(fnv1a(b"curds"), 0x07174bd5c64d9350); ++ assert_eq!(fnv1a(b"curds "), 0x5a294c3ff5d18750); ++ assert_eq!(fnv1a(b"curds a"), 0x05b3c1aeb308b843); ++ assert_eq!(fnv1a(b"curds an"), 0xb92a48da37d0f477); ++ assert_eq!(fnv1a(b"curds and"), 0x73cdddccd80ebc49); ++ assert_eq!(fnv1a(b"curds and "), 0xd58c4c13210a266b); ++ assert_eq!(fnv1a(b"curds and w"), 0xe78b6081243ec194); ++ assert_eq!(fnv1a(b"curds and wh"), 0xb096f77096a39f34); ++ assert_eq!(fnv1a(b"curds and whe"), 0xb425c54ff807b6a3); ++ assert_eq!(fnv1a(b"curds and whey"), 0x23e520e2751bb46e); ++ assert_eq!(fnv1a(b"curds and whey\n"), 0x1a0b44ccfe1385ec); ++ assert_eq!(fnv1a(b"cu\0"), 0xf5ba4b190cc2119f); ++ assert_eq!(fnv1a(b"cur\0"), 0x4c962690ace2baaf); ++ assert_eq!(fnv1a(b"curd\0"), 0x0716ded5c64cda19); ++ assert_eq!(fnv1a(b"curds\0"), 0x5a292c3ff5d150f0); ++ assert_eq!(fnv1a(b"curds \0"), 0x05b3e0aeb308ecf0); ++ assert_eq!(fnv1a(b"curds a\0"), 0xb92a5eda37d119d9); ++ assert_eq!(fnv1a(b"curds an\0"), 0x73ce41ccd80f6635); ++ assert_eq!(fnv1a(b"curds and\0"), 0xd58c2c132109f00b); ++ assert_eq!(fnv1a(b"curds and \0"), 0xe78baf81243f47d1); ++ assert_eq!(fnv1a(b"curds and w\0"), 0xb0968f7096a2ee7c); ++ assert_eq!(fnv1a(b"curds and wh\0"), 0xb425a84ff807855c); ++ assert_eq!(fnv1a(b"curds and whe\0"), 0x23e4e9e2751b56f9); ++ assert_eq!(fnv1a(b"curds and whey\0"), 0x1a0b4eccfe1396ea); ++ assert_eq!(fnv1a(b"curds and whey\n\0"), 0x54abd453bb2c9004); ++ assert_eq!(fnv1a(b"hi"), 0x08ba5f07b55ec3da); ++ assert_eq!(fnv1a(b"hi\0"), 0x337354193006cb6e); ++ assert_eq!(fnv1a(b"hello"), 0xa430d84680aabd0b); ++ assert_eq!(fnv1a(b"hello\0"), 0xa9bc8acca21f39b1); ++ 
assert_eq!(fnv1a(b"\xff\x00\x00\x01"), 0x6961196491cc682d); ++ assert_eq!(fnv1a(b"\x01\x00\x00\xff"), 0xad2bb1774799dfe9); ++ assert_eq!(fnv1a(b"\xff\x00\x00\x02"), 0x6961166491cc6314); ++ assert_eq!(fnv1a(b"\x02\x00\x00\xff"), 0x8d1bb3904a3b1236); ++ assert_eq!(fnv1a(b"\xff\x00\x00\x03"), 0x6961176491cc64c7); ++ assert_eq!(fnv1a(b"\x03\x00\x00\xff"), 0xed205d87f40434c7); ++ assert_eq!(fnv1a(b"\xff\x00\x00\x04"), 0x6961146491cc5fae); ++ assert_eq!(fnv1a(b"\x04\x00\x00\xff"), 0xcd3baf5e44f8ad9c); ++ assert_eq!(fnv1a(b"\x40\x51\x4e\x44"), 0xe3b36596127cd6d8); ++ assert_eq!(fnv1a(b"\x44\x4e\x51\x40"), 0xf77f1072c8e8a646); ++ assert_eq!(fnv1a(b"\x40\x51\x4e\x4a"), 0xe3b36396127cd372); ++ assert_eq!(fnv1a(b"\x4a\x4e\x51\x40"), 0x6067dce9932ad458); ++ assert_eq!(fnv1a(b"\x40\x51\x4e\x54"), 0xe3b37596127cf208); ++ assert_eq!(fnv1a(b"\x54\x4e\x51\x40"), 0x4b7b10fa9fe83936); ++ assert_eq!(fnv1a(b"127.0.0.1"), 0xaabafe7104d914be); ++ assert_eq!(fnv1a(b"127.0.0.1\0"), 0xf4d3180b3cde3eda); ++ assert_eq!(fnv1a(b"127.0.0.2"), 0xaabafd7104d9130b); ++ assert_eq!(fnv1a(b"127.0.0.2\0"), 0xf4cfb20b3cdb5bb1); ++ assert_eq!(fnv1a(b"127.0.0.3"), 0xaabafc7104d91158); ++ assert_eq!(fnv1a(b"127.0.0.3\0"), 0xf4cc4c0b3cd87888); ++ assert_eq!(fnv1a(b"64.81.78.68"), 0xe729bac5d2a8d3a7); ++ assert_eq!(fnv1a(b"64.81.78.68\0"), 0x74bc0524f4dfa4c5); ++ assert_eq!(fnv1a(b"64.81.78.74"), 0xe72630c5d2a5b352); ++ assert_eq!(fnv1a(b"64.81.78.74\0"), 0x6b983224ef8fb456); ++ assert_eq!(fnv1a(b"64.81.78.84"), 0xe73042c5d2ae266d); ++ assert_eq!(fnv1a(b"64.81.78.84\0"), 0x8527e324fdeb4b37); ++ assert_eq!(fnv1a(b"feedface"), 0x0a83c86fee952abc); ++ assert_eq!(fnv1a(b"feedface\0"), 0x7318523267779d74); ++ assert_eq!(fnv1a(b"feedfacedaffdeed"), 0x3e66d3d56b8caca1); ++ assert_eq!(fnv1a(b"feedfacedaffdeed\0"), 0x956694a5c0095593); ++ assert_eq!(fnv1a(b"feedfacedeadbeef"), 0xcac54572bb1a6fc8); ++ assert_eq!(fnv1a(b"feedfacedeadbeef\0"), 0xa7a4c9f3edebf0d8); ++ assert_eq!(fnv1a(b"line 1\nline 2\nline 3"), 
0x7829851fac17b143); ++ assert_eq!(fnv1a(b"chongo /\\../\\"), 0x2c8f4c9af81bcf06); ++ assert_eq!(fnv1a(b"chongo /\\../\\\0"), 0xd34e31539740c732); ++ assert_eq!(fnv1a(b"chongo (Landon Curt Noll) /\\../\\"), 0x3605a2ac253d2db1); ++ assert_eq!(fnv1a(b"chongo (Landon Curt Noll) /\\../\\\0"), 0x08c11b8346f4a3c3); ++ assert_eq!(fnv1a(b"http://antwrp.gsfc.nasa.gov/apod/astropix.html"), 0x6be396289ce8a6da); ++ assert_eq!(fnv1a(b"http://en.wikipedia.org/wiki/Fowler_Noll_Vo_hash"), 0xd9b957fb7fe794c5); ++ assert_eq!(fnv1a(b"http://epod.usra.edu/"), 0x05be33da04560a93); ++ assert_eq!(fnv1a(b"http://exoplanet.eu/"), 0x0957f1577ba9747c); ++ assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/cam3/"), 0xda2cc3acc24fba57); ++ assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/cams/HMcam/"), 0x74136f185b29e7f0); ++ assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/kilauea/update/deformation.html"), 0xb2f2b4590edb93b2); ++ assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/kilauea/update/images.html"), 0xb3608fce8b86ae04); ++ assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/kilauea/update/maps.html"), 0x4a3a865079359063); ++ assert_eq!(fnv1a(b"http://hvo.wr.usgs.gov/volcanowatch/current_issue.html"), 0x5b3a7ef496880a50); ++ assert_eq!(fnv1a(b"http://neo.jpl.nasa.gov/risk/"), 0x48fae3163854c23b); ++ assert_eq!(fnv1a(b"http://norvig.com/21-days.html"), 0x07aaa640476e0b9a); ++ assert_eq!(fnv1a(b"http://primes.utm.edu/curios/home.php"), 0x2f653656383a687d); ++ assert_eq!(fnv1a(b"http://slashdot.org/"), 0xa1031f8e7599d79c); ++ assert_eq!(fnv1a(b"http://tux.wr.usgs.gov/Maps/155.25-19.5.html"), 0xa31908178ff92477); ++ assert_eq!(fnv1a(b"http://volcano.wr.usgs.gov/kilaueastatus.php"), 0x097edf3c14c3fb83); ++ assert_eq!(fnv1a(b"http://www.avo.alaska.edu/activity/Redoubt.php"), 0xb51ca83feaa0971b); ++ assert_eq!(fnv1a(b"http://www.dilbert.com/fast/"), 0xdd3c0d96d784f2e9); ++ assert_eq!(fnv1a(b"http://www.fourmilab.ch/gravitation/orbits/"), 0x86cd26a9ea767d78); ++ assert_eq!(fnv1a(b"http://www.fpoa.net/"), 0xe6b215ff54a30c18); ++ 
assert_eq!(fnv1a(b"http://www.ioccc.org/index.html"), 0xec5b06a1c5531093); ++ assert_eq!(fnv1a(b"http://www.isthe.com/cgi-bin/number.cgi"), 0x45665a929f9ec5e5); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/bio.html"), 0x8c7609b4a9f10907); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/index.html"), 0x89aac3a491f0d729); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/src/calc/lucas-calc"), 0x32ce6b26e0f4a403); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/astro/venus2004.html"), 0x614ab44e02b53e01); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/astro/vita.html"), 0xfa6472eb6eef3290); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/comp/c/expert.html"), 0x9e5d75eb1948eb6a); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/comp/calc/index.html"), 0xb6d12ad4a8671852); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/comp/fnv/index.html"), 0x88826f56eba07af1); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/math/number/howhigh.html"), 0x44535bf2645bc0fd); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/math/number/number.html"), 0x169388ffc21e3728); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/math/prime/mersenne.html"), 0xf68aac9e396d8224); ++ assert_eq!(fnv1a(b"http://www.isthe.com/chongo/tech/math/prime/mersenne.html#largest"), 0x8e87d7e7472b3883); ++ assert_eq!(fnv1a(b"http://www.lavarnd.org/cgi-bin/corpspeak.cgi"), 0x295c26caa8b423de); ++ assert_eq!(fnv1a(b"http://www.lavarnd.org/cgi-bin/haiku.cgi"), 0x322c814292e72176); ++ assert_eq!(fnv1a(b"http://www.lavarnd.org/cgi-bin/rand-none.cgi"), 0x8a06550eb8af7268); ++ assert_eq!(fnv1a(b"http://www.lavarnd.org/cgi-bin/randdist.cgi"), 0xef86d60e661bcf71); ++ assert_eq!(fnv1a(b"http://www.lavarnd.org/index.html"), 0x9e5426c87f30ee54); ++ assert_eq!(fnv1a(b"http://www.lavarnd.org/what/nist-test.html"), 0xf1ea8aa826fd047e); ++ assert_eq!(fnv1a(b"http://www.macosxhints.com/"), 0x0babaf9a642cb769); ++ assert_eq!(fnv1a(b"http://www.mellis.com/"), 
0x4b3341d4068d012e); ++ assert_eq!(fnv1a(b"http://www.nature.nps.gov/air/webcams/parks/havoso2alert/havoalert.cfm"), 0xd15605cbc30a335c); ++ assert_eq!(fnv1a(b"http://www.nature.nps.gov/air/webcams/parks/havoso2alert/timelines_24.cfm"), 0x5b21060aed8412e5); ++ assert_eq!(fnv1a(b"http://www.paulnoll.com/"), 0x45e2cda1ce6f4227); ++ assert_eq!(fnv1a(b"http://www.pepysdiary.com/"), 0x50ae3745033ad7d4); ++ assert_eq!(fnv1a(b"http://www.sciencenews.org/index/home/activity/view"), 0xaa4588ced46bf414); ++ assert_eq!(fnv1a(b"http://www.skyandtelescope.com/"), 0xc1b0056c4a95467e); ++ assert_eq!(fnv1a(b"http://www.sput.nl/~rob/sirius.html"), 0x56576a71de8b4089); ++ assert_eq!(fnv1a(b"http://www.systemexperts.com/"), 0xbf20965fa6dc927e); ++ assert_eq!(fnv1a(b"http://www.tq-international.com/phpBB3/index.php"), 0x569f8383c2040882); ++ assert_eq!(fnv1a(b"http://www.travelquesttours.com/index.htm"), 0xe1e772fba08feca0); ++ assert_eq!(fnv1a(b"http://www.wunderground.com/global/stations/89606.html"), 0x4ced94af97138ac4); ++ assert_eq!(fnv1a(&repeat_10(b"21701")), 0xc4112ffb337a82fb); ++ assert_eq!(fnv1a(&repeat_10(b"M21701")), 0xd64a4fd41de38b7d); ++ assert_eq!(fnv1a(&repeat_10(b"2^21701-1")), 0x4cfc32329edebcbb); ++ assert_eq!(fnv1a(&repeat_10(b"\x54\xc5")), 0x0803564445050395); ++ assert_eq!(fnv1a(&repeat_10(b"\xc5\x54")), 0xaa1574ecf4642ffd); ++ assert_eq!(fnv1a(&repeat_10(b"23209")), 0x694bc4e54cc315f9); ++ assert_eq!(fnv1a(&repeat_10(b"M23209")), 0xa3d7cb273b011721); ++ assert_eq!(fnv1a(&repeat_10(b"2^23209-1")), 0x577c2f8b6115bfa5); ++ assert_eq!(fnv1a(&repeat_10(b"\x5a\xa9")), 0xb7ec8c1a769fb4c1); ++ assert_eq!(fnv1a(&repeat_10(b"\xa9\x5a")), 0x5d5cfce63359ab19); ++ assert_eq!(fnv1a(&repeat_10(b"391581216093")), 0x33b96c3cd65b5f71); ++ assert_eq!(fnv1a(&repeat_10(b"391581*2^216093-1")), 0xd845097780602bb9); ++ assert_eq!(fnv1a(&repeat_10(b"\x05\xf9\x9d\x03\x4c\x81")), 0x84d47645d02da3d5); ++ assert_eq!(fnv1a(&repeat_10(b"FEDCBA9876543210")), 0x83544f33b58773a5); ++ 
assert_eq!(fnv1a(&repeat_10(b"\xfe\xdc\xba\x98\x76\x54\x32\x10")), 0x9175cbb2160836c5); ++ assert_eq!(fnv1a(&repeat_10(b"EFCDAB8967452301")), 0xc71b3bc175e72bc5); ++ assert_eq!(fnv1a(&repeat_10(b"\xef\xcd\xab\x89\x67\x45\x23\x01")), 0x636806ac222ec985); ++ assert_eq!(fnv1a(&repeat_10(b"0123456789ABCDEF")), 0xb6ef0e6950f52ed5); ++ assert_eq!(fnv1a(&repeat_10(b"\x01\x23\x45\x67\x89\xab\xcd\xef")), 0xead3d8a0f3dfdaa5); ++ assert_eq!(fnv1a(&repeat_10(b"1032547698BADCFE")), 0x922908fe9a861ba5); ++ assert_eq!(fnv1a(&repeat_10(b"\x10\x32\x54\x76\x98\xba\xdc\xfe")), 0x6d4821de275fd5c5); ++ assert_eq!(fnv1a(&repeat_500(b"\x00")), 0x1fe3fce62bd816b5); ++ assert_eq!(fnv1a(&repeat_500(b"\x07")), 0xc23e9fccd6f70591); ++ assert_eq!(fnv1a(&repeat_500(b"~")), 0xc1af12bdfe16b5b5); ++ assert_eq!(fnv1a(&repeat_500(b"\x7f")), 0x39e9f18f2f85e221); ++ } ++} diff --cc vendor/globset-0.2.0/.cargo-checksum.json index 000000000,000000000..3cac4c65a new file mode 100644 --- /dev/null +++ b/vendor/globset-0.2.0/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"feeb1b6840809ef5efcf7a4a990bc4e1b7ee3df8cf9e2379a75aeb2ba42ac9c3"} diff --cc vendor/globset-0.2.0/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/globset-0.2.0/.cargo-ok diff --cc vendor/globset-0.2.0/COPYING index 000000000,000000000..bb9c20a09 new file mode 100644 --- /dev/null +++ b/vendor/globset-0.2.0/COPYING @@@ -1,0 -1,0 +1,3 @@@ ++This project is dual-licensed under the Unlicense and MIT licenses. ++ ++You may use this code under the terms of either license. diff --cc vendor/globset-0.2.0/Cargo.toml index 000000000,000000000..6132926a9 new file mode 100644 --- /dev/null +++ b/vendor/globset-0.2.0/Cargo.toml @@@ -1,0 -1,0 +1,32 @@@ ++[package] ++name = "globset" ++version = "0.2.0" #:version ++authors = ["Andrew Gallant "] ++description = """ ++Cross platform single glob and glob set matching. 
Glob set matching is the ++process of matching one or more glob patterns against a single candidate path ++simultaneously, and returning all of the globs that matched. ++""" ++documentation = "https://docs.rs/globset" ++homepage = "https://github.com/BurntSushi/ripgrep/tree/master/globset" ++repository = "https://github.com/BurntSushi/ripgrep/tree/master/globset" ++readme = "README.md" ++keywords = ["regex", "glob", "multiple", "set", "pattern"] ++license = "Unlicense/MIT" ++ ++[lib] ++name = "globset" ++bench = false ++ ++[dependencies] ++aho-corasick = "0.6.0" ++fnv = "1.0" ++log = "0.3" ++memchr = "1" ++regex = "0.2.1" ++ ++[dev-dependencies] ++glob = "0.2" ++ ++[features] ++simd-accel = ["regex/simd-accel"] diff --cc vendor/globset-0.2.0/LICENSE-MIT index 000000000,000000000..3b0a5dc09 new file mode 100644 --- /dev/null +++ b/vendor/globset-0.2.0/LICENSE-MIT @@@ -1,0 -1,0 +1,21 @@@ ++The MIT License (MIT) ++ ++Copyright (c) 2015 Andrew Gallant ++ ++Permission is hereby granted, free of charge, to any person obtaining a copy ++of this software and associated documentation files (the "Software"), to deal ++in the Software without restriction, including without limitation the rights ++to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ++copies of the Software, and to permit persons to whom the Software is ++furnished to do so, subject to the following conditions: ++ ++The above copyright notice and this permission notice shall be included in ++all copies or substantial portions of the Software. ++ ++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ++IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ++FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE ++AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ++LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ++OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN ++THE SOFTWARE. diff --cc vendor/globset-0.2.0/README.md index 000000000,000000000..f40b8aac5 new file mode 100644 --- /dev/null +++ b/vendor/globset-0.2.0/README.md @@@ -1,0 -1,0 +1,122 @@@ ++globset ++======= ++Cross platform single glob and glob set matching. Glob set matching is the ++process of matching one or more glob patterns against a single candidate path ++simultaneously, and returning all of the globs that matched. ++ ++[![Linux build status](https://api.travis-ci.org/BurntSushi/ripgrep.png)](https://travis-ci.org/BurntSushi/ripgrep) ++[![Windows build status](https://ci.appveyor.com/api/projects/status/github/BurntSushi/ripgrep?svg=true)](https://ci.appveyor.com/project/BurntSushi/ripgrep) ++[![](https://img.shields.io/crates/v/globset.svg)](https://crates.io/crates/globset) ++ ++Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org). ++ ++### Documentation ++ ++[https://docs.rs/globset](https://docs.rs/globset) ++ ++### Usage ++ ++Add this to your `Cargo.toml`: ++ ++```toml ++[dependencies] ++globset = "0.1" ++``` ++ ++and this to your crate root: ++ ++```rust ++extern crate globset; ++``` ++ ++### Example: one glob ++ ++This example shows how to match a single glob against a single file path. ++ ++```rust ++use globset::Glob; ++ ++let glob = try!(Glob::new("*.rs")).compile_matcher(); ++ ++assert!(glob.is_match("foo.rs")); ++assert!(glob.is_match("foo/bar.rs")); ++assert!(!glob.is_match("Cargo.toml")); ++``` ++ ++### Example: configuring a glob matcher ++ ++This example shows how to use a `GlobBuilder` to configure aspects of match ++semantics. In this example, we prevent wildcards from matching path separators. 
++ ++```rust ++use globset::GlobBuilder; ++ ++let glob = try!(GlobBuilder::new("*.rs") ++ .literal_separator(true).build()).compile_matcher(); ++ ++assert!(glob.is_match("foo.rs")); ++assert!(!glob.is_match("foo/bar.rs")); // no longer matches ++assert!(!glob.is_match("Cargo.toml")); ++``` ++ ++### Example: match multiple globs at once ++ ++This example shows how to match multiple glob patterns at once. ++ ++```rust ++use globset::{Glob, GlobSetBuilder}; ++ ++let mut builder = GlobSetBuilder::new(); ++// A GlobBuilder can be used to configure each glob's match semantics ++// independently. ++builder.add(try!(Glob::new("*.rs"))); ++builder.add(try!(Glob::new("src/lib.rs"))); ++builder.add(try!(Glob::new("src/**/foo.rs"))); ++let set = try!(builder.build()); ++ ++assert_eq!(set.matches("src/bar/baz/foo.rs"), vec![0, 2]); ++``` ++ ++### Performance ++ ++This crate implements globs by converting them to regular expressions, and ++executing them with the ++[`regex`](https://github.com/rust-lang-nursery/regex) ++crate. ++ ++For single glob matching, performance of this crate should be roughly on par ++with the performance of the ++[`glob`](https://github.com/rust-lang-nursery/glob) ++crate. (`*_regex` correspond to benchmarks for this library while `*_glob` ++correspond to benchmarks for the `glob` library.) ++Optimizations in the `regex` crate may propel this library past `glob`, ++particularly when matching longer paths. ++ ++``` ++test ext_glob ... bench: 425 ns/iter (+/- 21) ++test ext_regex ... bench: 175 ns/iter (+/- 10) ++test long_glob ... bench: 182 ns/iter (+/- 11) ++test long_regex ... bench: 173 ns/iter (+/- 10) ++test short_glob ... bench: 69 ns/iter (+/- 4) ++test short_regex ... bench: 83 ns/iter (+/- 2) ++``` ++ ++The primary performance advantage of this crate is when matching multiple ++globs against a single path. With the `glob` crate, one must match each glob ++synchronously, one after the other. In this crate, many can be matched ++simultaneously. 
For example: ++ ++``` ++test many_short_glob ... bench: 1,063 ns/iter (+/- 47) ++test many_short_regex_set ... bench: 186 ns/iter (+/- 11) ++``` ++ ++### Comparison with the [`glob`](https://github.com/rust-lang-nursery/glob) crate ++ ++* Supports alternate "or" globs, e.g., `*.{foo,bar}`. ++* Can match non-UTF-8 file paths correctly. ++* Supports matching multiple globs at once. ++* Doesn't provide a recursive directory iterator of matching file paths, ++ although I believe this crate should grow one eventually. ++* Supports case insensitive and require-literal-separator match options, but ++ **doesn't** support the require-literal-leading-dot option. diff --cc vendor/globset-0.2.0/UNLICENSE index 000000000,000000000..68a49daad new file mode 100644 --- /dev/null +++ b/vendor/globset-0.2.0/UNLICENSE @@@ -1,0 -1,0 +1,24 @@@ ++This is free and unencumbered software released into the public domain. ++ ++Anyone is free to copy, modify, publish, use, compile, sell, or ++distribute this software, either in source code form or as a compiled ++binary, for any purpose, commercial or non-commercial, and by any ++means. ++ ++In jurisdictions that recognize copyright laws, the author or authors ++of this software dedicate any and all copyright interest in the ++software to the public domain. We make this dedication for the benefit ++of the public at large and to the detriment of our heirs and ++successors. We intend this dedication to be an overt act of ++relinquishment in perpetuity of all present and future rights to this ++software under copyright law. ++ ++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, ++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF ++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
++IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR ++OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ++ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR ++OTHER DEALINGS IN THE SOFTWARE. ++ ++For more information, please refer to diff --cc vendor/globset-0.2.0/benches/bench.rs index 000000000,000000000..e142ed72e new file mode 100644 --- /dev/null +++ b/vendor/globset-0.2.0/benches/bench.rs @@@ -1,0 -1,0 +1,121 @@@ ++/*! ++This module benchmarks the glob implementation. For benchmarks on the ripgrep ++tool itself, see the benchsuite directory. ++*/ ++#![feature(test)] ++ ++extern crate glob; ++extern crate globset; ++#[macro_use] ++extern crate lazy_static; ++extern crate regex; ++extern crate test; ++ ++use std::ffi::OsStr; ++use std::path::Path; ++ ++use globset::{Candidate, Glob, GlobMatcher, GlobSet, GlobSetBuilder}; ++ ++const EXT: &'static str = "some/a/bigger/path/to/the/crazy/needle.txt"; ++const EXT_PAT: &'static str = "*.txt"; ++ ++const SHORT: &'static str = "some/needle.txt"; ++const SHORT_PAT: &'static str = "some/**/needle.txt"; ++ ++const LONG: &'static str = "some/a/bigger/path/to/the/crazy/needle.txt"; ++const LONG_PAT: &'static str = "some/**/needle.txt"; ++ ++fn new_glob(pat: &str) -> glob::Pattern { ++ glob::Pattern::new(pat).unwrap() ++} ++ ++fn new_reglob(pat: &str) -> GlobMatcher { ++ Glob::new(pat).unwrap().compile_matcher() ++} ++ ++fn new_reglob_many(pats: &[&str]) -> GlobSet { ++ let mut builder = GlobSetBuilder::new(); ++ for pat in pats { ++ builder.add(Glob::new(pat).unwrap()); ++ } ++ builder.build().unwrap() ++} ++ ++#[bench] ++fn ext_glob(b: &mut test::Bencher) { ++ let pat = new_glob(EXT_PAT); ++ b.iter(|| assert!(pat.matches(EXT))); ++} ++ ++#[bench] ++fn ext_regex(b: &mut test::Bencher) { ++ let set = new_reglob(EXT_PAT); ++ let cand = Candidate::new(EXT); ++ b.iter(|| assert!(set.is_match_candidate(&cand))); ++} ++ ++#[bench] ++fn short_glob(b: &mut test::Bencher) { ++ 
let pat = new_glob(SHORT_PAT); ++ b.iter(|| assert!(pat.matches(SHORT))); ++} ++ ++#[bench] ++fn short_regex(b: &mut test::Bencher) { ++ let set = new_reglob(SHORT_PAT); ++ let cand = Candidate::new(SHORT); ++ b.iter(|| assert!(set.is_match_candidate(&cand))); ++} ++ ++#[bench] ++fn long_glob(b: &mut test::Bencher) { ++ let pat = new_glob(LONG_PAT); ++ b.iter(|| assert!(pat.matches(LONG))); ++} ++ ++#[bench] ++fn long_regex(b: &mut test::Bencher) { ++ let set = new_reglob(LONG_PAT); ++ let cand = Candidate::new(LONG); ++ b.iter(|| assert!(set.is_match_candidate(&cand))); ++} ++ ++const MANY_SHORT_GLOBS: &'static [&'static str] = &[ ++ // Taken from a random .gitignore on my system. ++ ".*.swp", ++ "tags", ++ "target", ++ "*.lock", ++ "tmp", ++ "*.csv", ++ "*.fst", ++ "*-got", ++ "*.csv.idx", ++ "words", ++ "98m*", ++ "dict", ++ "test", ++ "months", ++]; ++ ++const MANY_SHORT_SEARCH: &'static str = "98m-blah.csv.idx"; ++ ++#[bench] ++fn many_short_glob(b: &mut test::Bencher) { ++ let pats: Vec<_> = MANY_SHORT_GLOBS.iter().map(|&s| new_glob(s)).collect(); ++ b.iter(|| { ++ let mut count = 0; ++ for pat in &pats { ++ if pat.matches(MANY_SHORT_SEARCH) { ++ count += 1; ++ } ++ } ++ assert_eq!(2, count); ++ }) ++} ++ ++#[bench] ++fn many_short_regex_set(b: &mut test::Bencher) { ++ let set = new_reglob_many(MANY_SHORT_GLOBS); ++ b.iter(|| assert_eq!(2, set.matches(MANY_SHORT_SEARCH).iter().count())); ++} diff --cc vendor/globset-0.2.0/src/glob.rs index 000000000,000000000..bb7b0602e new file mode 100644 --- /dev/null +++ b/vendor/globset-0.2.0/src/glob.rs @@@ -1,0 -1,0 +1,1340 @@@ ++use std::ffi::{OsStr, OsString}; ++use std::fmt; ++use std::hash; ++use std::iter; ++use std::ops::{Deref, DerefMut}; ++use std::path::{Path, is_separator}; ++use std::str; ++ ++use regex; ++use regex::bytes::Regex; ++ ++use {Candidate, Error, ErrorKind, new_regex}; ++ ++/// Describes a matching strategy for a particular pattern. 
++/// ++/// This provides a way to more quickly determine whether a pattern matches ++/// a particular file path in a way that scales with a large number of ++/// patterns. For example, if many patterns are of the form `*.ext`, then it's ++/// possible to test whether any of those patterns matches by looking up a ++/// file path's extension in a hash table. ++#[derive(Clone, Debug, Eq, PartialEq)] ++pub enum MatchStrategy { ++ /// A pattern matches if and only if the entire file path matches this ++ /// literal string. ++ Literal(String), ++ /// A pattern matches if and only if the file path's basename matches this ++ /// literal string. ++ BasenameLiteral(String), ++ /// A pattern matches if and only if the file path's extension matches this ++ /// literal string. ++ Extension(OsString), ++ /// A pattern matches if and only if this prefix literal is a prefix of the ++ /// candidate file path. ++ Prefix(String), ++ /// A pattern matches if and only if this prefix literal is a prefix of the ++ /// candidate file path. ++ /// ++ /// An exception: if `component` is true, then `suffix` must appear at the ++ /// beginning of a file path or immediately following a `/`. ++ Suffix { ++ /// The actual suffix. ++ suffix: String, ++ /// Whether this must start at the beginning of a path component. ++ component: bool, ++ }, ++ /// A pattern matches only if the given extension matches the file path's ++ /// extension. Note that this is a necessary but NOT sufficient criterion. ++ /// Namely, if the extension matches, then a full regex search is still ++ /// required. ++ RequiredExtension(OsString), ++ /// A regex needs to be used for matching. ++ Regex, ++} ++ ++impl MatchStrategy { ++ /// Returns a matching strategy for the given pattern. 
++ pub fn new(pat: &Glob) -> MatchStrategy { ++ if let Some(lit) = pat.basename_literal() { ++ MatchStrategy::BasenameLiteral(lit) ++ } else if let Some(lit) = pat.literal() { ++ MatchStrategy::Literal(lit) ++ } else if let Some(ext) = pat.ext() { ++ MatchStrategy::Extension(ext) ++ } else if let Some(prefix) = pat.prefix() { ++ MatchStrategy::Prefix(prefix) ++ } else if let Some((suffix, component)) = pat.suffix() { ++ MatchStrategy::Suffix { suffix: suffix, component: component } ++ } else if let Some(ext) = pat.required_ext() { ++ MatchStrategy::RequiredExtension(ext) ++ } else { ++ MatchStrategy::Regex ++ } ++ } ++} ++ ++/// Glob represents a successfully parsed shell glob pattern. ++/// ++/// It cannot be used directly to match file paths, but it can be converted ++/// to a regular expression string or a matcher. ++#[derive(Clone, Debug, Eq)] ++pub struct Glob { ++ glob: String, ++ re: String, ++ opts: GlobOptions, ++ tokens: Tokens, ++} ++ ++impl PartialEq for Glob { ++ fn eq(&self, other: &Glob) -> bool { ++ self.glob == other.glob && self.opts == other.opts ++ } ++} ++ ++impl hash::Hash for Glob { ++ fn hash(&self, state: &mut H) { ++ self.glob.hash(state); ++ self.opts.hash(state); ++ } ++} ++ ++impl fmt::Display for Glob { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ self.glob.fmt(f) ++ } ++} ++ ++/// A matcher for a single pattern. ++#[derive(Clone, Debug)] ++pub struct GlobMatcher { ++ /// The underlying pattern. ++ pat: Glob, ++ /// The pattern, as a compiled regex. ++ re: Regex, ++} ++ ++impl GlobMatcher { ++ /// Tests whether the given path matches this pattern or not. ++ pub fn is_match>(&self, path: P) -> bool { ++ self.is_match_candidate(&Candidate::new(path.as_ref())) ++ } ++ ++ /// Tests whether the given path matches this pattern or not. ++ pub fn is_match_candidate(&self, path: &Candidate) -> bool { ++ self.re.is_match(&path.path) ++ } ++} ++ ++/// A strategic matcher for a single pattern. 
++#[cfg(test)] ++#[derive(Clone, Debug)] ++struct GlobStrategic { ++ /// The match strategy to use. ++ strategy: MatchStrategy, ++ /// The underlying pattern. ++ pat: Glob, ++ /// The pattern, as a compiled regex. ++ re: Regex, ++} ++ ++#[cfg(test)] ++impl GlobStrategic { ++ /// Tests whether the given path matches this pattern or not. ++ fn is_match>(&self, path: P) -> bool { ++ self.is_match_candidate(&Candidate::new(path.as_ref())) ++ } ++ ++ /// Tests whether the given path matches this pattern or not. ++ fn is_match_candidate(&self, candidate: &Candidate) -> bool { ++ let byte_path = &*candidate.path; ++ ++ match self.strategy { ++ MatchStrategy::Literal(ref lit) => lit.as_bytes() == byte_path, ++ MatchStrategy::BasenameLiteral(ref lit) => { ++ lit.as_bytes() == &*candidate.basename ++ } ++ MatchStrategy::Extension(ref ext) => { ++ candidate.ext == ext ++ } ++ MatchStrategy::Prefix(ref pre) => { ++ starts_with(pre.as_bytes(), byte_path) ++ } ++ MatchStrategy::Suffix { ref suffix, component } => { ++ if component && byte_path == &suffix.as_bytes()[1..] { ++ return true; ++ } ++ ends_with(suffix.as_bytes(), byte_path) ++ } ++ MatchStrategy::RequiredExtension(ref ext) => { ++ candidate.ext == ext && self.re.is_match(byte_path) ++ } ++ MatchStrategy::Regex => self.re.is_match(byte_path), ++ } ++ } ++} ++ ++/// A builder for a pattern. ++/// ++/// This builder enables configuring the match semantics of a pattern. For ++/// example, one can make matching case insensitive. ++/// ++/// The lifetime `'a` refers to the lifetime of the pattern string. ++#[derive(Clone, Debug)] ++pub struct GlobBuilder<'a> { ++ /// The glob pattern to compile. ++ glob: &'a str, ++ /// Options for the pattern. ++ opts: GlobOptions, ++} ++ ++#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)] ++struct GlobOptions { ++ /// Whether to match case insensitively. ++ case_insensitive: bool, ++ /// Whether to require a literal separator to match a separator in a file ++ /// path. 
e.g., when enabled, `*` won't match `/`. ++ literal_separator: bool, ++} ++ ++#[derive(Clone, Debug, Default, Eq, PartialEq)] ++struct Tokens(Vec); ++ ++impl Deref for Tokens { ++ type Target = Vec; ++ fn deref(&self) -> &Vec { &self.0 } ++} ++ ++impl DerefMut for Tokens { ++ fn deref_mut(&mut self) -> &mut Vec { &mut self.0 } ++} ++ ++#[derive(Clone, Debug, Eq, PartialEq)] ++enum Token { ++ Literal(char), ++ Any, ++ ZeroOrMore, ++ RecursivePrefix, ++ RecursiveSuffix, ++ RecursiveZeroOrMore, ++ Class { ++ negated: bool, ++ ranges: Vec<(char, char)>, ++ }, ++ Alternates(Vec), ++} ++ ++impl Glob { ++ /// Builds a new pattern with default options. ++ pub fn new(glob: &str) -> Result { ++ GlobBuilder::new(glob).build() ++ } ++ ++ /// Returns a matcher for this pattern. ++ pub fn compile_matcher(&self) -> GlobMatcher { ++ let re = new_regex(&self.re) ++ .expect("regex compilation shouldn't fail"); ++ GlobMatcher { ++ pat: self.clone(), ++ re: re, ++ } ++ } ++ ++ /// Returns a strategic matcher. ++ /// ++ /// This isn't exposed because it's not clear whether it's actually ++ /// faster than just running a regex for a *single* pattern. If it ++ /// is faster, then GlobMatcher should do it automatically. ++ #[cfg(test)] ++ fn compile_strategic_matcher(&self) -> GlobStrategic { ++ let strategy = MatchStrategy::new(self); ++ let re = new_regex(&self.re) ++ .expect("regex compilation shouldn't fail"); ++ GlobStrategic { ++ strategy: strategy, ++ pat: self.clone(), ++ re: re, ++ } ++ } ++ ++ /// Returns the original glob pattern used to build this pattern. ++ pub fn glob(&self) -> &str { ++ &self.glob ++ } ++ ++ /// Returns the regular expression string for this glob. ++ pub fn regex(&self) -> &str { ++ &self.re ++ } ++ ++ /// Returns the pattern as a literal if and only if the pattern must match ++ /// an entire path exactly. ++ /// ++ /// The basic format of these patterns is `{literal}`. 
++ fn literal(&self) -> Option { ++ if self.opts.case_insensitive { ++ return None; ++ } ++ let mut lit = String::new(); ++ for t in &*self.tokens { ++ match *t { ++ Token::Literal(c) => lit.push(c), ++ _ => return None, ++ } ++ } ++ if lit.is_empty() { ++ None ++ } else { ++ Some(lit) ++ } ++ } ++ ++ /// Returns an extension if this pattern matches a file path if and only ++ /// if the file path has the extension returned. ++ /// ++ /// Note that this extension returned differs from the extension that ++ /// std::path::Path::extension returns. Namely, this extension includes ++ /// the '.'. Also, paths like `.rs` are considered to have an extension ++ /// of `.rs`. ++ fn ext(&self) -> Option { ++ if self.opts.case_insensitive { ++ return None; ++ } ++ let start = match self.tokens.get(0) { ++ Some(&Token::RecursivePrefix) => 1, ++ Some(_) => 0, ++ _ => return None, ++ }; ++ match self.tokens.get(start) { ++ Some(&Token::ZeroOrMore) => { ++ // If there was no recursive prefix, then we only permit ++ // `*` if `*` can match a `/`. For example, if `*` can't ++ // match `/`, then `*.c` doesn't match `foo/bar.c`. ++ if start == 0 && self.opts.literal_separator { ++ return None; ++ } ++ } ++ _ => return None, ++ } ++ match self.tokens.get(start + 1) { ++ Some(&Token::Literal('.')) => {} ++ _ => return None, ++ } ++ let mut lit = OsStr::new(".").to_os_string(); ++ for t in self.tokens[start + 2..].iter() { ++ match *t { ++ Token::Literal('.') | Token::Literal('/') => return None, ++ Token::Literal(c) => lit.push(c.to_string()), ++ _ => return None, ++ } ++ } ++ if lit.is_empty() { ++ None ++ } else { ++ Some(lit) ++ } ++ } ++ ++ /// This is like `ext`, but returns an extension even if it isn't sufficent ++ /// to imply a match. Namely, if an extension is returned, then it is ++ /// necessary but not sufficient for a match. 
++ fn required_ext(&self) -> Option { ++ if self.opts.case_insensitive { ++ return None; ++ } ++ // We don't care at all about the beginning of this pattern. All we ++ // need to check for is if it ends with a literal of the form `.ext`. ++ let mut ext: Vec = vec![]; // built in reverse ++ for t in self.tokens.iter().rev() { ++ match *t { ++ Token::Literal('/') => return None, ++ Token::Literal(c) => { ++ ext.push(c); ++ if c == '.' { ++ break; ++ } ++ } ++ _ => return None, ++ } ++ } ++ if ext.last() != Some(&'.') { ++ None ++ } else { ++ ext.reverse(); ++ Some(OsString::from(ext.into_iter().collect::())) ++ } ++ } ++ ++ /// Returns a literal prefix of this pattern if the entire pattern matches ++ /// if the literal prefix matches. ++ fn prefix(&self) -> Option { ++ if self.opts.case_insensitive { ++ return None; ++ } ++ let end = match self.tokens.last() { ++ Some(&Token::ZeroOrMore) => { ++ if self.opts.literal_separator { ++ // If a trailing `*` can't match a `/`, then we can't ++ // assume a match of the prefix corresponds to a match ++ // of the overall pattern. e.g., `foo/*` with ++ // `literal_separator` enabled matches `foo/bar` but not ++ // `foo/bar/baz`, even though `foo/bar/baz` has a `foo/` ++ // literal prefix. ++ return None; ++ } ++ self.tokens.len() - 1 ++ } ++ _ => self.tokens.len(), ++ }; ++ let mut lit = String::new(); ++ for t in &self.tokens[0..end] { ++ match *t { ++ Token::Literal(c) => lit.push(c), ++ _ => return None, ++ } ++ } ++ if lit.is_empty() { ++ None ++ } else { ++ Some(lit) ++ } ++ } ++ ++ /// Returns a literal suffix of this pattern if the entire pattern matches ++ /// if the literal suffix matches. ++ /// ++ /// If a literal suffix is returned and it must match either the entire ++ /// file path or be preceded by a `/`, then also return true. This happens ++ /// with a pattern like `**/foo/bar`. Namely, this pattern matches ++ /// `foo/bar` and `baz/foo/bar`, but not `foofoo/bar`. 
In this case, the ++ /// suffix returned is `/foo/bar` (but should match the entire path ++ /// `foo/bar`). ++ /// ++ /// When this returns true, the suffix literal is guaranteed to start with ++ /// a `/`. ++ fn suffix(&self) -> Option<(String, bool)> { ++ if self.opts.case_insensitive { ++ return None; ++ } ++ let mut lit = String::new(); ++ let (start, entire) = match self.tokens.get(0) { ++ Some(&Token::RecursivePrefix) => { ++ // We only care if this follows a path component if the next ++ // token is a literal. ++ if let Some(&Token::Literal(_)) = self.tokens.get(1) { ++ lit.push('/'); ++ (1, true) ++ } else { ++ (1, false) ++ } ++ } ++ _ => (0, false), ++ }; ++ let start = match self.tokens.get(start) { ++ Some(&Token::ZeroOrMore) => { ++ // If literal_separator is enabled, then a `*` can't ++ // necessarily match everything, so reporting a suffix match ++ // as a match of the pattern would be a false positive. ++ if self.opts.literal_separator { ++ return None; ++ } ++ start + 1 ++ } ++ _ => start, ++ }; ++ for t in &self.tokens[start..] { ++ match *t { ++ Token::Literal(c) => lit.push(c), ++ _ => return None, ++ } ++ } ++ if lit.is_empty() || lit == "/" { ++ None ++ } else { ++ Some((lit, entire)) ++ } ++ } ++ ++ /// If this pattern only needs to inspect the basename of a file path, ++ /// then the tokens corresponding to only the basename match are returned. ++ /// ++ /// For example, given a pattern of `**/*.foo`, only the tokens ++ /// corresponding to `*.foo` are returned. ++ /// ++ /// Note that this will return None if any match of the basename tokens ++ /// doesn't correspond to a match of the entire pattern. For example, the ++ /// glob `foo` only matches when a file path has a basename of `foo`, but ++ /// doesn't *always* match when a file path has a basename of `foo`. e.g., ++ /// `foo` doesn't match `abc/foo`. 
++ fn basename_tokens(&self) -> Option<&[Token]> { ++ if self.opts.case_insensitive { ++ return None; ++ } ++ let start = match self.tokens.get(0) { ++ Some(&Token::RecursivePrefix) => 1, ++ _ => { ++ // With nothing to gobble up the parent portion of a path, ++ // we can't assume that matching on only the basename is ++ // correct. ++ return None; ++ } ++ }; ++ if self.tokens[start..].is_empty() { ++ return None; ++ } ++ for t in &self.tokens[start..] { ++ match *t { ++ Token::Literal('/') => return None, ++ Token::Literal(_) => {} // OK ++ Token::Any | Token::ZeroOrMore => { ++ if !self.opts.literal_separator { ++ // In this case, `*` and `?` can match a path ++ // separator, which means this could reach outside ++ // the basename. ++ return None; ++ } ++ } ++ Token::RecursivePrefix ++ | Token::RecursiveSuffix ++ | Token::RecursiveZeroOrMore => { ++ return None; ++ } ++ Token::Class{..} | Token::Alternates(..) => { ++ // We *could* be a little smarter here, but either one ++ // of these is going to prevent our literal optimizations ++ // anyway, so give up. ++ return None; ++ } ++ } ++ } ++ Some(&self.tokens[start..]) ++ } ++ ++ /// Returns the pattern as a literal if and only if the pattern exclusiely ++ /// matches the basename of a file path *and* is a literal. ++ /// ++ /// The basic format of these patterns is `**/{literal}`, where `{literal}` ++ /// does not contain a path separator. ++ fn basename_literal(&self) -> Option { ++ let tokens = match self.basename_tokens() { ++ None => return None, ++ Some(tokens) => tokens, ++ }; ++ let mut lit = String::new(); ++ for t in tokens { ++ match *t { ++ Token::Literal(c) => lit.push(c), ++ _ => return None, ++ } ++ } ++ Some(lit) ++ } ++} ++ ++impl<'a> GlobBuilder<'a> { ++ /// Create a new builder for the pattern given. ++ /// ++ /// The pattern is not compiled until `build` is called. 
++ pub fn new(glob: &'a str) -> GlobBuilder<'a> { ++ GlobBuilder { ++ glob: glob, ++ opts: GlobOptions::default(), ++ } ++ } ++ ++ /// Parses and builds the pattern. ++ pub fn build(&self) -> Result { ++ let mut p = Parser { ++ glob: &self.glob, ++ stack: vec![Tokens::default()], ++ chars: self.glob.chars().peekable(), ++ prev: None, ++ cur: None, ++ }; ++ try!(p.parse()); ++ if p.stack.is_empty() { ++ Err(Error { ++ glob: Some(self.glob.to_string()), ++ kind: ErrorKind::UnopenedAlternates, ++ }) ++ } else if p.stack.len() > 1 { ++ Err(Error { ++ glob: Some(self.glob.to_string()), ++ kind: ErrorKind::UnclosedAlternates, ++ }) ++ } else { ++ let tokens = p.stack.pop().unwrap(); ++ Ok(Glob { ++ glob: self.glob.to_string(), ++ re: tokens.to_regex_with(&self.opts), ++ opts: self.opts, ++ tokens: tokens, ++ }) ++ } ++ } ++ ++ /// Toggle whether the pattern matches case insensitively or not. ++ /// ++ /// This is disabled by default. ++ pub fn case_insensitive(&mut self, yes: bool) -> &mut GlobBuilder<'a> { ++ self.opts.case_insensitive = yes; ++ self ++ } ++ ++ /// Toggle whether a literal `/` is required to match a path separator. ++ pub fn literal_separator(&mut self, yes: bool) -> &mut GlobBuilder<'a> { ++ self.opts.literal_separator = yes; ++ self ++ } ++} ++ ++impl Tokens { ++ /// Convert this pattern to a string that is guaranteed to be a valid ++ /// regular expression and will represent the matching semantics of this ++ /// glob pattern and the options given. ++ fn to_regex_with(&self, options: &GlobOptions) -> String { ++ let mut re = String::new(); ++ re.push_str("(?-u)"); ++ if options.case_insensitive { ++ re.push_str("(?i)"); ++ } ++ re.push('^'); ++ // Special case. If the entire glob is just `**`, then it should match ++ // everything. 
++ if self.len() == 1 && self[0] == Token::RecursivePrefix { ++ re.push_str(".*"); ++ re.push('$'); ++ return re; ++ } ++ self.tokens_to_regex(options, &self, &mut re); ++ re.push('$'); ++ re ++ } ++ ++ fn tokens_to_regex( ++ &self, ++ options: &GlobOptions, ++ tokens: &[Token], ++ re: &mut String, ++ ) { ++ for tok in tokens { ++ match *tok { ++ Token::Literal(c) => { ++ re.push_str(&char_to_escaped_literal(c)); ++ } ++ Token::Any => { ++ if options.literal_separator { ++ re.push_str("[^/]"); ++ } else { ++ re.push_str("."); ++ } ++ } ++ Token::ZeroOrMore => { ++ if options.literal_separator { ++ re.push_str("[^/]*"); ++ } else { ++ re.push_str(".*"); ++ } ++ } ++ Token::RecursivePrefix => { ++ re.push_str("(?:/?|.*/)"); ++ } ++ Token::RecursiveSuffix => { ++ re.push_str("(?:/?|/.*)"); ++ } ++ Token::RecursiveZeroOrMore => { ++ re.push_str("(?:/|/.*/)"); ++ } ++ Token::Class { negated, ref ranges } => { ++ re.push('['); ++ if negated { ++ re.push('^'); ++ } ++ for r in ranges { ++ if r.0 == r.1 { ++ // Not strictly necessary, but nicer to look at. ++ re.push_str(&char_to_escaped_literal(r.0)); ++ } else { ++ re.push_str(&char_to_escaped_literal(r.0)); ++ re.push('-'); ++ re.push_str(&char_to_escaped_literal(r.1)); ++ } ++ } ++ re.push(']'); ++ } ++ Token::Alternates(ref patterns) => { ++ let mut parts = vec![]; ++ for pat in patterns { ++ let mut altre = String::new(); ++ self.tokens_to_regex(options, &pat, &mut altre); ++ if !altre.is_empty() { ++ parts.push(altre); ++ } ++ } ++ ++ // It is possible to have an empty set in which case the ++ // resulting alternation '()' would be an error. ++ if !parts.is_empty() { ++ re.push('('); ++ re.push_str(&parts.join("|")); ++ re.push(')'); ++ } ++ } ++ } ++ } ++ } ++} ++ ++/// Convert a Unicode scalar value to an escaped string suitable for use as ++/// a literal in a non-Unicode regex. 
++fn char_to_escaped_literal(c: char) -> String { ++ bytes_to_escaped_literal(&c.to_string().into_bytes()) ++} ++ ++/// Converts an arbitrary sequence of bytes to a UTF-8 string. All non-ASCII ++/// code units are converted to their escaped form. ++fn bytes_to_escaped_literal(bs: &[u8]) -> String { ++ let mut s = String::with_capacity(bs.len()); ++ for &b in bs { ++ if b <= 0x7F { ++ s.push_str(®ex::escape(&(b as char).to_string())); ++ } else { ++ s.push_str(&format!("\\x{:02x}", b)); ++ } ++ } ++ s ++} ++ ++struct Parser<'a> { ++ glob: &'a str, ++ stack: Vec, ++ chars: iter::Peekable>, ++ prev: Option, ++ cur: Option, ++} ++ ++impl<'a> Parser<'a> { ++ fn error(&self, kind: ErrorKind) -> Error { ++ Error { glob: Some(self.glob.to_string()), kind: kind } ++ } ++ ++ fn parse(&mut self) -> Result<(), Error> { ++ while let Some(c) = self.bump() { ++ match c { ++ '?' => try!(self.push_token(Token::Any)), ++ '*' => try!(self.parse_star()), ++ '[' => try!(self.parse_class()), ++ '{' => try!(self.push_alternate()), ++ '}' => try!(self.pop_alternate()), ++ ',' => try!(self.parse_comma()), ++ c => { ++ if is_separator(c) { ++ // Normalize all patterns to use / as a separator. 
++ try!(self.push_token(Token::Literal('/'))) ++ } else { ++ try!(self.push_token(Token::Literal(c))) ++ } ++ } ++ } ++ } ++ Ok(()) ++ } ++ ++ fn push_alternate(&mut self) -> Result<(), Error> { ++ if self.stack.len() > 1 { ++ return Err(self.error(ErrorKind::NestedAlternates)); ++ } ++ Ok(self.stack.push(Tokens::default())) ++ } ++ ++ fn pop_alternate(&mut self) -> Result<(), Error> { ++ let mut alts = vec![]; ++ while self.stack.len() >= 2 { ++ alts.push(self.stack.pop().unwrap()); ++ } ++ self.push_token(Token::Alternates(alts)) ++ } ++ ++ fn push_token(&mut self, tok: Token) -> Result<(), Error> { ++ if let Some(ref mut pat) = self.stack.last_mut() { ++ return Ok(pat.push(tok)); ++ } ++ Err(self.error(ErrorKind::UnopenedAlternates)) ++ } ++ ++ fn pop_token(&mut self) -> Result { ++ if let Some(ref mut pat) = self.stack.last_mut() { ++ return Ok(pat.pop().unwrap()); ++ } ++ Err(self.error(ErrorKind::UnopenedAlternates)) ++ } ++ ++ fn have_tokens(&self) -> Result { ++ match self.stack.last() { ++ None => Err(self.error(ErrorKind::UnopenedAlternates)), ++ Some(ref pat) => Ok(!pat.is_empty()), ++ } ++ } ++ ++ fn parse_comma(&mut self) -> Result<(), Error> { ++ // If we aren't inside a group alternation, then don't ++ // treat commas specially. Otherwise, we need to start ++ // a new alternate. 
++ if self.stack.len() <= 1 { ++ self.push_token(Token::Literal(',')) ++ } else { ++ Ok(self.stack.push(Tokens::default())) ++ } ++ } ++ ++ fn parse_star(&mut self) -> Result<(), Error> { ++ let prev = self.prev; ++ if self.chars.peek() != Some(&'*') { ++ try!(self.push_token(Token::ZeroOrMore)); ++ return Ok(()); ++ } ++ assert!(self.bump() == Some('*')); ++ if !try!(self.have_tokens()) { ++ try!(self.push_token(Token::RecursivePrefix)); ++ let next = self.bump(); ++ if !next.map(is_separator).unwrap_or(true) { ++ return Err(self.error(ErrorKind::InvalidRecursive)); ++ } ++ return Ok(()); ++ } ++ try!(self.pop_token()); ++ if !prev.map(is_separator).unwrap_or(false) { ++ if self.stack.len() <= 1 ++ || (prev != Some(',') && prev != Some('{')) { ++ return Err(self.error(ErrorKind::InvalidRecursive)); ++ } ++ } ++ match self.chars.peek() { ++ None => { ++ assert!(self.bump().is_none()); ++ self.push_token(Token::RecursiveSuffix) ++ } ++ Some(&',') | Some(&'}') if self.stack.len() >= 2 => { ++ self.push_token(Token::RecursiveSuffix) ++ } ++ Some(&c) if is_separator(c) => { ++ assert!(self.bump().map(is_separator).unwrap_or(false)); ++ self.push_token(Token::RecursiveZeroOrMore) ++ } ++ _ => Err(self.error(ErrorKind::InvalidRecursive)), ++ } ++ } ++ ++ fn parse_class(&mut self) -> Result<(), Error> { ++ fn add_to_last_range( ++ glob: &str, ++ r: &mut (char, char), ++ add: char, ++ ) -> Result<(), Error> { ++ r.1 = add; ++ if r.1 < r.0 { ++ Err(Error { ++ glob: Some(glob.to_string()), ++ kind: ErrorKind::InvalidRange(r.0, r.1), ++ }) ++ } else { ++ Ok(()) ++ } ++ } ++ let mut negated = false; ++ let mut ranges = vec![]; ++ if self.chars.peek() == Some(&'!') { ++ assert!(self.bump() == Some('!')); ++ negated = true; ++ } ++ let mut first = true; ++ let mut in_range = false; ++ loop { ++ let c = match self.bump() { ++ Some(c) => c, ++ // The only way to successfully break this loop is to observe ++ // a ']'. 
++ None => return Err(self.error(ErrorKind::UnclosedClass)), ++ }; ++ match c { ++ ']' => { ++ if first { ++ ranges.push((']', ']')); ++ } else { ++ break; ++ } ++ } ++ '-' => { ++ if first { ++ ranges.push(('-', '-')); ++ } else if in_range { ++ // invariant: in_range is only set when there is ++ // already at least one character seen. ++ let r = ranges.last_mut().unwrap(); ++ try!(add_to_last_range(&self.glob, r, '-')); ++ in_range = false; ++ } else { ++ assert!(!ranges.is_empty()); ++ in_range = true; ++ } ++ } ++ c => { ++ if in_range { ++ // invariant: in_range is only set when there is ++ // already at least one character seen. ++ try!(add_to_last_range( ++ &self.glob, ranges.last_mut().unwrap(), c)); ++ } else { ++ ranges.push((c, c)); ++ } ++ in_range = false; ++ } ++ } ++ first = false; ++ } ++ if in_range { ++ // Means that the last character in the class was a '-', so add ++ // it as a literal. ++ ranges.push(('-', '-')); ++ } ++ self.push_token(Token::Class { ++ negated: negated, ++ ranges: ranges, ++ }) ++ } ++ ++ fn bump(&mut self) -> Option { ++ self.prev = self.cur; ++ self.cur = self.chars.next(); ++ self.cur ++ } ++} ++ ++#[cfg(test)] ++fn starts_with(needle: &[u8], haystack: &[u8]) -> bool { ++ needle.len() <= haystack.len() && needle == &haystack[..needle.len()] ++} ++ ++#[cfg(test)] ++fn ends_with(needle: &[u8], haystack: &[u8]) -> bool { ++ if needle.len() > haystack.len() { ++ return false; ++ } ++ needle == &haystack[haystack.len() - needle.len()..] ++} ++ ++#[cfg(test)] ++mod tests { ++ use std::ffi::{OsStr, OsString}; ++ ++ use {GlobSetBuilder, ErrorKind}; ++ use super::{Glob, GlobBuilder, Token}; ++ use super::Token::*; ++ ++ #[derive(Clone, Copy, Debug, Default)] ++ struct Options { ++ casei: bool, ++ litsep: bool, ++ } ++ ++ macro_rules! syntax { ++ ($name:ident, $pat:expr, $tokens:expr) => { ++ #[test] ++ fn $name() { ++ let pat = Glob::new($pat).unwrap(); ++ assert_eq!($tokens, pat.tokens.0); ++ } ++ } ++ } ++ ++ macro_rules! 
syntaxerr { ++ ($name:ident, $pat:expr, $err:expr) => { ++ #[test] ++ fn $name() { ++ let err = Glob::new($pat).unwrap_err(); ++ assert_eq!(&$err, err.kind()); ++ } ++ } ++ } ++ ++ macro_rules! toregex { ++ ($name:ident, $pat:expr, $re:expr) => { ++ toregex!($name, $pat, $re, Options::default()); ++ }; ++ ($name:ident, $pat:expr, $re:expr, $options:expr) => { ++ #[test] ++ fn $name() { ++ let pat = GlobBuilder::new($pat) ++ .case_insensitive($options.casei) ++ .literal_separator($options.litsep) ++ .build() ++ .unwrap(); ++ assert_eq!(format!("(?-u){}", $re), pat.regex()); ++ } ++ }; ++ } ++ ++ macro_rules! matches { ++ ($name:ident, $pat:expr, $path:expr) => { ++ matches!($name, $pat, $path, Options::default()); ++ }; ++ ($name:ident, $pat:expr, $path:expr, $options:expr) => { ++ #[test] ++ fn $name() { ++ let pat = GlobBuilder::new($pat) ++ .case_insensitive($options.casei) ++ .literal_separator($options.litsep) ++ .build() ++ .unwrap(); ++ let matcher = pat.compile_matcher(); ++ let strategic = pat.compile_strategic_matcher(); ++ let set = GlobSetBuilder::new().add(pat).build().unwrap(); ++ assert!(matcher.is_match($path)); ++ assert!(strategic.is_match($path)); ++ assert!(set.is_match($path)); ++ } ++ }; ++ } ++ ++ macro_rules! 
nmatches { ++ ($name:ident, $pat:expr, $path:expr) => { ++ nmatches!($name, $pat, $path, Options::default()); ++ }; ++ ($name:ident, $pat:expr, $path:expr, $options:expr) => { ++ #[test] ++ fn $name() { ++ let pat = GlobBuilder::new($pat) ++ .case_insensitive($options.casei) ++ .literal_separator($options.litsep) ++ .build() ++ .unwrap(); ++ let matcher = pat.compile_matcher(); ++ let strategic = pat.compile_strategic_matcher(); ++ let set = GlobSetBuilder::new().add(pat).build().unwrap(); ++ assert!(!matcher.is_match($path)); ++ assert!(!strategic.is_match($path)); ++ assert!(!set.is_match($path)); ++ } ++ }; ++ } ++ ++ fn s(string: &str) -> String { string.to_string() } ++ fn os(string: &str) -> OsString { OsStr::new(string).to_os_string() } ++ ++ fn class(s: char, e: char) -> Token { ++ Class { negated: false, ranges: vec![(s, e)] } ++ } ++ ++ fn classn(s: char, e: char) -> Token { ++ Class { negated: true, ranges: vec![(s, e)] } ++ } ++ ++ fn rclass(ranges: &[(char, char)]) -> Token { ++ Class { negated: false, ranges: ranges.to_vec() } ++ } ++ ++ fn rclassn(ranges: &[(char, char)]) -> Token { ++ Class { negated: true, ranges: ranges.to_vec() } ++ } ++ ++ syntax!(literal1, "a", vec![Literal('a')]); ++ syntax!(literal2, "ab", vec![Literal('a'), Literal('b')]); ++ syntax!(any1, "?", vec![Any]); ++ syntax!(any2, "a?b", vec![Literal('a'), Any, Literal('b')]); ++ syntax!(seq1, "*", vec![ZeroOrMore]); ++ syntax!(seq2, "a*b", vec![Literal('a'), ZeroOrMore, Literal('b')]); ++ syntax!(seq3, "*a*b*", vec![ ++ ZeroOrMore, Literal('a'), ZeroOrMore, Literal('b'), ZeroOrMore, ++ ]); ++ syntax!(rseq1, "**", vec![RecursivePrefix]); ++ syntax!(rseq2, "**/", vec![RecursivePrefix]); ++ syntax!(rseq3, "/**", vec![RecursiveSuffix]); ++ syntax!(rseq4, "/**/", vec![RecursiveZeroOrMore]); ++ syntax!(rseq5, "a/**/b", vec![ ++ Literal('a'), RecursiveZeroOrMore, Literal('b'), ++ ]); ++ syntax!(cls1, "[a]", vec![class('a', 'a')]); ++ syntax!(cls2, "[!a]", vec![classn('a', 'a')]); ++ 
syntax!(cls3, "[a-z]", vec![class('a', 'z')]); ++ syntax!(cls4, "[!a-z]", vec![classn('a', 'z')]); ++ syntax!(cls5, "[-]", vec![class('-', '-')]); ++ syntax!(cls6, "[]]", vec![class(']', ']')]); ++ syntax!(cls7, "[*]", vec![class('*', '*')]); ++ syntax!(cls8, "[!!]", vec![classn('!', '!')]); ++ syntax!(cls9, "[a-]", vec![rclass(&[('a', 'a'), ('-', '-')])]); ++ syntax!(cls10, "[-a-z]", vec![rclass(&[('-', '-'), ('a', 'z')])]); ++ syntax!(cls11, "[a-z-]", vec![rclass(&[('a', 'z'), ('-', '-')])]); ++ syntax!(cls12, "[-a-z-]", vec![ ++ rclass(&[('-', '-'), ('a', 'z'), ('-', '-')]), ++ ]); ++ syntax!(cls13, "[]-z]", vec![class(']', 'z')]); ++ syntax!(cls14, "[--z]", vec![class('-', 'z')]); ++ syntax!(cls15, "[ --]", vec![class(' ', '-')]); ++ syntax!(cls16, "[0-9a-z]", vec![rclass(&[('0', '9'), ('a', 'z')])]); ++ syntax!(cls17, "[a-z0-9]", vec![rclass(&[('a', 'z'), ('0', '9')])]); ++ syntax!(cls18, "[!0-9a-z]", vec![rclassn(&[('0', '9'), ('a', 'z')])]); ++ syntax!(cls19, "[!a-z0-9]", vec![rclassn(&[('a', 'z'), ('0', '9')])]); ++ ++ syntaxerr!(err_rseq1, "a**", ErrorKind::InvalidRecursive); ++ syntaxerr!(err_rseq2, "**a", ErrorKind::InvalidRecursive); ++ syntaxerr!(err_rseq3, "a**b", ErrorKind::InvalidRecursive); ++ syntaxerr!(err_rseq4, "***", ErrorKind::InvalidRecursive); ++ syntaxerr!(err_rseq5, "/a**", ErrorKind::InvalidRecursive); ++ syntaxerr!(err_rseq6, "/**a", ErrorKind::InvalidRecursive); ++ syntaxerr!(err_rseq7, "/a**b", ErrorKind::InvalidRecursive); ++ syntaxerr!(err_unclosed1, "[", ErrorKind::UnclosedClass); ++ syntaxerr!(err_unclosed2, "[]", ErrorKind::UnclosedClass); ++ syntaxerr!(err_unclosed3, "[!", ErrorKind::UnclosedClass); ++ syntaxerr!(err_unclosed4, "[!]", ErrorKind::UnclosedClass); ++ syntaxerr!(err_range1, "[z-a]", ErrorKind::InvalidRange('z', 'a')); ++ syntaxerr!(err_range2, "[z--]", ErrorKind::InvalidRange('z', '-')); ++ ++ const CASEI: Options = Options { ++ casei: true, ++ litsep: false, ++ }; ++ const SLASHLIT: Options = Options { ++ casei: 
false, ++ litsep: true, ++ }; ++ ++ toregex!(re_casei, "a", "(?i)^a$", &CASEI); ++ ++ toregex!(re_slash1, "?", r"^[^/]$", SLASHLIT); ++ toregex!(re_slash2, "*", r"^[^/]*$", SLASHLIT); ++ ++ toregex!(re1, "a", "^a$"); ++ toregex!(re2, "?", "^.$"); ++ toregex!(re3, "*", "^.*$"); ++ toregex!(re4, "a?", "^a.$"); ++ toregex!(re5, "?a", "^.a$"); ++ toregex!(re6, "a*", "^a.*$"); ++ toregex!(re7, "*a", "^.*a$"); ++ toregex!(re8, "[*]", r"^[\*]$"); ++ toregex!(re9, "[+]", r"^[\+]$"); ++ toregex!(re10, "+", r"^\+$"); ++ toregex!(re11, "**", r"^.*$"); ++ toregex!(re12, "☃", r"^\xe2\x98\x83$"); ++ ++ matches!(match1, "a", "a"); ++ matches!(match2, "a*b", "a_b"); ++ matches!(match3, "a*b*c", "abc"); ++ matches!(match4, "a*b*c", "a_b_c"); ++ matches!(match5, "a*b*c", "a___b___c"); ++ matches!(match6, "abc*abc*abc", "abcabcabcabcabcabcabc"); ++ matches!(match7, "a*a*a*a*a*a*a*a*a", "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"); ++ matches!(match8, "a*b[xyz]c*d", "abxcdbxcddd"); ++ matches!(match9, "*.rs", ".rs"); ++ matches!(match10, "☃", "☃"); ++ ++ matches!(matchrec1, "some/**/needle.txt", "some/needle.txt"); ++ matches!(matchrec2, "some/**/needle.txt", "some/one/needle.txt"); ++ matches!(matchrec3, "some/**/needle.txt", "some/one/two/needle.txt"); ++ matches!(matchrec4, "some/**/needle.txt", "some/other/needle.txt"); ++ matches!(matchrec5, "**", "abcde"); ++ matches!(matchrec6, "**", ""); ++ matches!(matchrec7, "**", ".asdf"); ++ matches!(matchrec8, "**", "/x/.asdf"); ++ matches!(matchrec9, "some/**/**/needle.txt", "some/needle.txt"); ++ matches!(matchrec10, "some/**/**/needle.txt", "some/one/needle.txt"); ++ matches!(matchrec11, "some/**/**/needle.txt", "some/one/two/needle.txt"); ++ matches!(matchrec12, "some/**/**/needle.txt", "some/other/needle.txt"); ++ matches!(matchrec13, "**/test", "one/two/test"); ++ matches!(matchrec14, "**/test", "one/test"); ++ matches!(matchrec15, "**/test", "test"); ++ matches!(matchrec16, "/**/test", "/one/two/test"); ++ matches!(matchrec17, "/**/test", 
"/one/test"); ++ matches!(matchrec18, "/**/test", "/test"); ++ matches!(matchrec19, "**/.*", ".abc"); ++ matches!(matchrec20, "**/.*", "abc/.abc"); ++ matches!(matchrec21, ".*/**", ".abc"); ++ matches!(matchrec22, ".*/**", ".abc/abc"); ++ matches!(matchrec23, "foo/**", "foo"); ++ matches!(matchrec24, "**/foo/bar", "foo/bar"); ++ ++ matches!(matchrange1, "a[0-9]b", "a0b"); ++ matches!(matchrange2, "a[0-9]b", "a9b"); ++ matches!(matchrange3, "a[!0-9]b", "a_b"); ++ matches!(matchrange4, "[a-z123]", "1"); ++ matches!(matchrange5, "[1a-z23]", "1"); ++ matches!(matchrange6, "[123a-z]", "1"); ++ matches!(matchrange7, "[abc-]", "-"); ++ matches!(matchrange8, "[-abc]", "-"); ++ matches!(matchrange9, "[-a-c]", "b"); ++ matches!(matchrange10, "[a-c-]", "b"); ++ matches!(matchrange11, "[-]", "-"); ++ ++ matches!(matchpat1, "*hello.txt", "hello.txt"); ++ matches!(matchpat2, "*hello.txt", "gareth_says_hello.txt"); ++ matches!(matchpat3, "*hello.txt", "some/path/to/hello.txt"); ++ matches!(matchpat4, "*hello.txt", "some\\path\\to\\hello.txt"); ++ matches!(matchpat5, "*hello.txt", "/an/absolute/path/to/hello.txt"); ++ matches!(matchpat6, "*some/path/to/hello.txt", "some/path/to/hello.txt"); ++ matches!(matchpat7, "*some/path/to/hello.txt", ++ "a/bigger/some/path/to/hello.txt"); ++ ++ matches!(matchescape, "_[[]_[]]_[?]_[*]_!_", "_[_]_?_*_!_"); ++ ++ matches!(matchcasei1, "aBcDeFg", "aBcDeFg", CASEI); ++ matches!(matchcasei2, "aBcDeFg", "abcdefg", CASEI); ++ matches!(matchcasei3, "aBcDeFg", "ABCDEFG", CASEI); ++ matches!(matchcasei4, "aBcDeFg", "AbCdEfG", CASEI); ++ ++ matches!(matchalt1, "a,b", "a,b"); ++ matches!(matchalt2, ",", ","); ++ matches!(matchalt3, "{a,b}", "a"); ++ matches!(matchalt4, "{a,b}", "b"); ++ matches!(matchalt5, "{**/src/**,foo}", "abc/src/bar"); ++ matches!(matchalt6, "{**/src/**,foo}", "foo"); ++ matches!(matchalt7, "{[}],foo}", "}"); ++ matches!(matchalt8, "{foo}", "foo"); ++ matches!(matchalt9, "{}", ""); ++ matches!(matchalt10, "{,}", ""); ++ 
matches!(matchalt11, "{*.foo,*.bar,*.wat}", "test.foo"); ++ matches!(matchalt12, "{*.foo,*.bar,*.wat}", "test.bar"); ++ matches!(matchalt13, "{*.foo,*.bar,*.wat}", "test.wat"); ++ ++ matches!(matchslash1, "abc/def", "abc/def", SLASHLIT); ++ #[cfg(unix)] ++ nmatches!(matchslash2, "abc?def", "abc/def", SLASHLIT); ++ #[cfg(not(unix))] ++ nmatches!(matchslash2, "abc?def", "abc\\def", SLASHLIT); ++ nmatches!(matchslash3, "abc*def", "abc/def", SLASHLIT); ++ matches!(matchslash4, "abc[/]def", "abc/def", SLASHLIT); // differs ++ #[cfg(unix)] ++ nmatches!(matchslash5, "abc\\def", "abc/def", SLASHLIT); ++ #[cfg(not(unix))] ++ matches!(matchslash5, "abc\\def", "abc/def", SLASHLIT); ++ ++ nmatches!(matchnot1, "a*b*c", "abcd"); ++ nmatches!(matchnot2, "abc*abc*abc", "abcabcabcabcabcabcabca"); ++ nmatches!(matchnot3, "some/**/needle.txt", "some/other/notthis.txt"); ++ nmatches!(matchnot4, "some/**/**/needle.txt", "some/other/notthis.txt"); ++ nmatches!(matchnot5, "/**/test", "test"); ++ nmatches!(matchnot6, "/**/test", "/one/notthis"); ++ nmatches!(matchnot7, "/**/test", "/notthis"); ++ nmatches!(matchnot8, "**/.*", "ab.c"); ++ nmatches!(matchnot9, "**/.*", "abc/ab.c"); ++ nmatches!(matchnot10, ".*/**", "a.bc"); ++ nmatches!(matchnot11, ".*/**", "abc/a.bc"); ++ nmatches!(matchnot12, "a[0-9]b", "a_b"); ++ nmatches!(matchnot13, "a[!0-9]b", "a0b"); ++ nmatches!(matchnot14, "a[!0-9]b", "a9b"); ++ nmatches!(matchnot15, "[!-]", "-"); ++ nmatches!(matchnot16, "*hello.txt", "hello.txt-and-then-some"); ++ nmatches!(matchnot17, "*hello.txt", "goodbye.txt"); ++ nmatches!(matchnot18, "*some/path/to/hello.txt", ++ "some/path/to/hello.txt-and-then-some"); ++ nmatches!(matchnot19, "*some/path/to/hello.txt", ++ "some/other/path/to/hello.txt"); ++ nmatches!(matchnot20, "a", "foo/a"); ++ nmatches!(matchnot21, "./foo", "foo"); ++ nmatches!(matchnot22, "**/foo", "foofoo"); ++ nmatches!(matchnot23, "**/foo/bar", "foofoo/bar"); ++ nmatches!(matchnot24, "/*.c", "mozilla-sha1/sha1.c"); ++ 
nmatches!(matchnot25, "*.c", "mozilla-sha1/sha1.c", SLASHLIT); ++ nmatches!(matchnot26, "**/m4/ltoptions.m4", ++ "csharp/src/packages/repositories.config", SLASHLIT); ++ ++ macro_rules! extract { ++ ($which:ident, $name:ident, $pat:expr, $expect:expr) => { ++ extract!($which, $name, $pat, $expect, Options::default()); ++ }; ++ ($which:ident, $name:ident, $pat:expr, $expect:expr, $opts:expr) => { ++ #[test] ++ fn $name() { ++ let pat = GlobBuilder::new($pat) ++ .case_insensitive($opts.casei) ++ .literal_separator($opts.litsep) ++ .build().unwrap(); ++ assert_eq!($expect, pat.$which()); ++ } ++ }; ++ } ++ ++ macro_rules! literal { ++ ($($tt:tt)*) => { extract!(literal, $($tt)*); } ++ } ++ ++ macro_rules! basetokens { ++ ($($tt:tt)*) => { extract!(basename_tokens, $($tt)*); } ++ } ++ ++ macro_rules! ext { ++ ($($tt:tt)*) => { extract!(ext, $($tt)*); } ++ } ++ ++ macro_rules! required_ext { ++ ($($tt:tt)*) => { extract!(required_ext, $($tt)*); } ++ } ++ ++ macro_rules! prefix { ++ ($($tt:tt)*) => { extract!(prefix, $($tt)*); } ++ } ++ ++ macro_rules! suffix { ++ ($($tt:tt)*) => { extract!(suffix, $($tt)*); } ++ } ++ ++ macro_rules! 
baseliteral { ++ ($($tt:tt)*) => { extract!(basename_literal, $($tt)*); } ++ } ++ ++ literal!(extract_lit1, "foo", Some(s("foo"))); ++ literal!(extract_lit2, "foo", None, CASEI); ++ literal!(extract_lit3, "/foo", Some(s("/foo"))); ++ literal!(extract_lit4, "/foo/", Some(s("/foo/"))); ++ literal!(extract_lit5, "/foo/bar", Some(s("/foo/bar"))); ++ literal!(extract_lit6, "*.foo", None); ++ literal!(extract_lit7, "foo/bar", Some(s("foo/bar"))); ++ literal!(extract_lit8, "**/foo/bar", None); ++ ++ basetokens!(extract_basetoks1, "**/foo", Some(&*vec![ ++ Literal('f'), Literal('o'), Literal('o'), ++ ])); ++ basetokens!(extract_basetoks2, "**/foo", None, CASEI); ++ basetokens!(extract_basetoks3, "**/foo", Some(&*vec![ ++ Literal('f'), Literal('o'), Literal('o'), ++ ]), SLASHLIT); ++ basetokens!(extract_basetoks4, "*foo", None, SLASHLIT); ++ basetokens!(extract_basetoks5, "*foo", None); ++ basetokens!(extract_basetoks6, "**/fo*o", None); ++ basetokens!(extract_basetoks7, "**/fo*o", Some(&*vec![ ++ Literal('f'), Literal('o'), ZeroOrMore, Literal('o'), ++ ]), SLASHLIT); ++ ++ ext!(extract_ext1, "**/*.rs", Some(os(".rs"))); ++ ext!(extract_ext2, "**/*.rs.bak", None); ++ ext!(extract_ext3, "*.rs", Some(os(".rs"))); ++ ext!(extract_ext4, "a*.rs", None); ++ ext!(extract_ext5, "/*.c", None); ++ ext!(extract_ext6, "*.c", None, SLASHLIT); ++ ext!(extract_ext7, "*.c", Some(os(".c"))); ++ ++ required_ext!(extract_req_ext1, "*.rs", Some(os(".rs"))); ++ required_ext!(extract_req_ext2, "/foo/bar/*.rs", Some(os(".rs"))); ++ required_ext!(extract_req_ext3, "/foo/bar/*.rs", Some(os(".rs"))); ++ required_ext!(extract_req_ext4, "/foo/bar/.rs", Some(os(".rs"))); ++ required_ext!(extract_req_ext5, ".rs", Some(os(".rs"))); ++ required_ext!(extract_req_ext6, "./rs", None); ++ required_ext!(extract_req_ext7, "foo", None); ++ required_ext!(extract_req_ext8, ".foo/", None); ++ required_ext!(extract_req_ext9, "foo/", None); ++ ++ prefix!(extract_prefix1, "/foo", Some(s("/foo"))); ++ 
prefix!(extract_prefix2, "/foo/*", Some(s("/foo/"))); ++ prefix!(extract_prefix3, "**/foo", None); ++ prefix!(extract_prefix4, "foo/**", None); ++ ++ suffix!(extract_suffix1, "**/foo/bar", Some((s("/foo/bar"), true))); ++ suffix!(extract_suffix2, "*/foo/bar", Some((s("/foo/bar"), false))); ++ suffix!(extract_suffix3, "*/foo/bar", None, SLASHLIT); ++ suffix!(extract_suffix4, "foo/bar", Some((s("foo/bar"), false))); ++ suffix!(extract_suffix5, "*.foo", Some((s(".foo"), false))); ++ suffix!(extract_suffix6, "*.foo", None, SLASHLIT); ++ suffix!(extract_suffix7, "**/*_test", Some((s("_test"), false))); ++ ++ baseliteral!(extract_baselit1, "**/foo", Some(s("foo"))); ++ baseliteral!(extract_baselit2, "foo", None); ++ baseliteral!(extract_baselit3, "*foo", None); ++ baseliteral!(extract_baselit4, "*/foo", None); ++} diff --cc vendor/globset-0.2.0/src/lib.rs index 000000000,000000000..980807057 new file mode 100644 --- /dev/null +++ b/vendor/globset-0.2.0/src/lib.rs @@@ -1,0 -1,0 +1,835 @@@ ++/*! ++The globset crate provides cross platform single glob and glob set matching. ++ ++Glob set matching is the process of matching one or more glob patterns against ++a single candidate path simultaneously, and returning all of the globs that ++matched. For example, given this set of globs: ++ ++```ignore ++*.rs ++src/lib.rs ++src/**/foo.rs ++``` ++ ++and a path `src/bar/baz/foo.rs`, then the set would report the first and third ++globs as matching. ++ ++# Example: one glob ++ ++This example shows how to match a single glob against a single file path. ++ ++``` ++# fn example() -> Result<(), globset::Error> { ++use globset::Glob; ++ ++let glob = try!(Glob::new("*.rs")).compile_matcher(); ++ ++assert!(glob.is_match("foo.rs")); ++assert!(glob.is_match("foo/bar.rs")); ++assert!(!glob.is_match("Cargo.toml")); ++# Ok(()) } example().unwrap(); ++``` ++ ++# Example: configuring a glob matcher ++ ++This example shows how to use a `GlobBuilder` to configure aspects of match ++semantics. 
In this example, we prevent wildcards from matching path separators. ++ ++``` ++# fn example() -> Result<(), globset::Error> { ++use globset::GlobBuilder; ++ ++let glob = try!(GlobBuilder::new("*.rs") ++ .literal_separator(true).build()).compile_matcher(); ++ ++assert!(glob.is_match("foo.rs")); ++assert!(!glob.is_match("foo/bar.rs")); // no longer matches ++assert!(!glob.is_match("Cargo.toml")); ++# Ok(()) } example().unwrap(); ++``` ++ ++# Example: match multiple globs at once ++ ++This example shows how to match multiple glob patterns at once. ++ ++``` ++# fn example() -> Result<(), globset::Error> { ++use globset::{Glob, GlobSetBuilder}; ++ ++let mut builder = GlobSetBuilder::new(); ++// A GlobBuilder can be used to configure each glob's match semantics ++// independently. ++builder.add(try!(Glob::new("*.rs"))); ++builder.add(try!(Glob::new("src/lib.rs"))); ++builder.add(try!(Glob::new("src/**/foo.rs"))); ++let set = try!(builder.build()); ++ ++assert_eq!(set.matches("src/bar/baz/foo.rs"), vec![0, 2]); ++# Ok(()) } example().unwrap(); ++``` ++ ++# Syntax ++ ++Standard Unix-style glob syntax is supported: ++ ++* `?` matches any single character. (If the `literal_separator` option is ++ enabled, then `?` can never match a path separator.) ++* `*` matches zero or more characters. (If the `literal_separator` option is ++ enabled, then `*` can never match a path separator.) ++* `**` recursively matches directories but are only legal in three situations. ++ First, if the glob starts with \*\*/, then it matches ++ all directories. For example, \*\*/foo matches `foo` ++ and `bar/foo` but not `foo/bar`. Secondly, if the glob ends with ++ /\*\*, then it matches all sub-entries. For example, ++ foo/\*\* matches `foo/a` and `foo/a/b`, but not `foo`. ++ Thirdly, if the glob contains /\*\*/ anywhere within ++ the pattern, then it matches zero or more directories. Using `**` anywhere ++ else is illegal (N.B. the glob `**` is allowed and means "match everything"). 
++* `{a,b}` matches `a` or `b` where `a` and `b` are arbitrary glob patterns. ++ (N.B. Nesting `{...}` is not currently allowed.) ++* `[ab]` matches `a` or `b` where `a` and `b` are characters. Use ++ `[!ab]` to match any character except for `a` and `b`. ++* Metacharacters such as `*` and `?` can be escaped with character class ++ notation. e.g., `[*]` matches `*`. ++ ++A `GlobBuilder` can be used to prevent wildcards from matching path separators, ++or to enable case insensitive matching. ++*/ ++ ++#![deny(missing_docs)] ++ ++extern crate aho_corasick; ++extern crate fnv; ++#[macro_use] ++extern crate log; ++extern crate memchr; ++extern crate regex; ++ ++use std::borrow::Cow; ++use std::collections::{BTreeMap, HashMap}; ++use std::error::Error as StdError; ++use std::ffi::{OsStr, OsString}; ++use std::fmt; ++use std::hash; ++use std::path::Path; ++use std::str; ++ ++use aho_corasick::{Automaton, AcAutomaton, FullAcAutomaton}; ++use regex::bytes::{Regex, RegexBuilder, RegexSet}; ++ ++use pathutil::{ ++ file_name, file_name_ext, normalize_path, os_str_bytes, path_bytes, ++}; ++use glob::MatchStrategy; ++pub use glob::{Glob, GlobBuilder, GlobMatcher}; ++ ++mod glob; ++mod pathutil; ++ ++/// Represents an error that can occur when parsing a glob pattern. ++#[derive(Clone, Debug, Eq, PartialEq)] ++pub struct Error { ++ /// The original glob provided by the caller. ++ glob: Option, ++ /// The kind of error. ++ kind: ErrorKind, ++} ++ ++/// The kind of error that can occur when parsing a glob pattern. ++#[derive(Clone, Debug, Eq, PartialEq)] ++pub enum ErrorKind { ++ /// Occurs when a use of `**` is invalid. Namely, `**` can only appear ++ /// adjacent to a path separator, or the beginning/end of a glob. ++ InvalidRecursive, ++ /// Occurs when a character class (e.g., `[abc]`) is not closed. ++ UnclosedClass, ++ /// Occurs when a range in a character (e.g., `[a-z]`) is invalid. 
For ++ /// example, if the range starts with a lexicographically larger character ++ /// than it ends with. ++ InvalidRange(char, char), ++ /// Occurs when a `}` is found without a matching `{`. ++ UnopenedAlternates, ++ /// Occurs when a `{` is found without a matching `}`. ++ UnclosedAlternates, ++ /// Occurs when an alternating group is nested inside another alternating ++ /// group, e.g., `{{a,b},{c,d}}`. ++ NestedAlternates, ++ /// An error associated with parsing or compiling a regex. ++ Regex(String), ++} ++ ++impl StdError for Error { ++ fn description(&self) -> &str { ++ self.kind.description() ++ } ++} ++ ++impl Error { ++ /// Return the glob that caused this error, if one exists. ++ pub fn glob(&self) -> Option<&str> { ++ self.glob.as_ref().map(|s| &**s) ++ } ++ ++ /// Return the kind of this error. ++ pub fn kind(&self) -> &ErrorKind { ++ &self.kind ++ } ++} ++ ++impl ErrorKind { ++ fn description(&self) -> &str { ++ match *self { ++ ErrorKind::InvalidRecursive => { ++ "invalid use of **; must be one path component" ++ } ++ ErrorKind::UnclosedClass => { ++ "unclosed character class; missing ']'" ++ } ++ ErrorKind::InvalidRange(_, _) => { ++ "invalid character range" ++ } ++ ErrorKind::UnopenedAlternates => { ++ "unopened alternate group; missing '{' \ ++ (maybe escape '}' with '[}]'?)" ++ } ++ ErrorKind::UnclosedAlternates => { ++ "unclosed alternate group; missing '}' \ ++ (maybe escape '{' with '[{]'?)" ++ } ++ ErrorKind::NestedAlternates => { ++ "nested alternate groups are not allowed" ++ } ++ ErrorKind::Regex(ref err) => err, ++ } ++ } ++} ++ ++impl fmt::Display for Error { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ match self.glob { ++ None => self.kind.fmt(f), ++ Some(ref glob) => { ++ write!(f, "error parsing glob '{}': {}", glob, self.kind) ++ } ++ } ++ } ++} ++ ++impl fmt::Display for ErrorKind { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ match *self { ++ ErrorKind::InvalidRecursive ++ | 
ErrorKind::UnclosedClass ++ | ErrorKind::UnopenedAlternates ++ | ErrorKind::UnclosedAlternates ++ | ErrorKind::NestedAlternates ++ | ErrorKind::Regex(_) => { ++ write!(f, "{}", self.description()) ++ } ++ ErrorKind::InvalidRange(s, e) => { ++ write!(f, "invalid range; '{}' > '{}'", s, e) ++ } ++ } ++ } ++} ++ ++fn new_regex(pat: &str) -> Result { ++ RegexBuilder::new(pat) ++ .dot_matches_new_line(true) ++ .size_limit(10 * (1 << 20)) ++ .dfa_size_limit(10 * (1 << 20)) ++ .build() ++ .map_err(|err| { ++ Error { ++ glob: Some(pat.to_string()), ++ kind: ErrorKind::Regex(err.to_string()), ++ } ++ }) ++} ++ ++fn new_regex_set(pats: I) -> Result ++ where S: AsRef, I: IntoIterator { ++ RegexSet::new(pats).map_err(|err| { ++ Error { ++ glob: None, ++ kind: ErrorKind::Regex(err.to_string()), ++ } ++ }) ++} ++ ++type Fnv = hash::BuildHasherDefault; ++ ++/// GlobSet represents a group of globs that can be matched together in a ++/// single pass. ++#[derive(Clone, Debug)] ++pub struct GlobSet { ++ len: usize, ++ strats: Vec, ++} ++ ++impl GlobSet { ++ /// Returns true if this set is empty, and therefore matches nothing. ++ pub fn is_empty(&self) -> bool { ++ self.len == 0 ++ } ++ ++ /// Returns the number of globs in this set. ++ pub fn len(&self) -> usize { ++ self.len ++ } ++ ++ /// Returns true if any glob in this set matches the path given. ++ pub fn is_match>(&self, path: P) -> bool { ++ self.is_match_candidate(&Candidate::new(path.as_ref())) ++ } ++ ++ /// Returns true if any glob in this set matches the path given. ++ /// ++ /// This takes a Candidate as input, which can be used to amortize the ++ /// cost of preparing a path for matching. ++ pub fn is_match_candidate(&self, path: &Candidate) -> bool { ++ if self.is_empty() { ++ return false; ++ } ++ for strat in &self.strats { ++ if strat.is_match(path) { ++ return true; ++ } ++ } ++ false ++ } ++ ++ /// Returns the sequence number of every glob pattern that matches the ++ /// given path. 
++ pub fn matches>(&self, path: P) -> Vec { ++ self.matches_candidate(&Candidate::new(path.as_ref())) ++ } ++ ++ /// Returns the sequence number of every glob pattern that matches the ++ /// given path. ++ /// ++ /// This takes a Candidate as input, which can be used to amortize the ++ /// cost of preparing a path for matching. ++ pub fn matches_candidate(&self, path: &Candidate) -> Vec { ++ let mut into = vec![]; ++ if self.is_empty() { ++ return into; ++ } ++ self.matches_candidate_into(path, &mut into); ++ into ++ } ++ ++ /// Adds the sequence number of every glob pattern that matches the given ++ /// path to the vec given. ++ /// ++ /// `into` is is cleared before matching begins, and contains the set of ++ /// sequence numbers (in ascending order) after matching ends. If no globs ++ /// were matched, then `into` will be empty. ++ pub fn matches_into>( ++ &self, ++ path: P, ++ into: &mut Vec, ++ ) { ++ self.matches_candidate_into(&Candidate::new(path.as_ref()), into); ++ } ++ ++ /// Adds the sequence number of every glob pattern that matches the given ++ /// path to the vec given. ++ /// ++ /// `into` is is cleared before matching begins, and contains the set of ++ /// sequence numbers (in ascending order) after matching ends. If no globs ++ /// were matched, then `into` will be empty. ++ /// ++ /// This takes a Candidate as input, which can be used to amortize the ++ /// cost of preparing a path for matching. 
++ pub fn matches_candidate_into( ++ &self, ++ path: &Candidate, ++ into: &mut Vec, ++ ) { ++ into.clear(); ++ if self.is_empty() { ++ return; ++ } ++ for strat in &self.strats { ++ strat.matches_into(path, into); ++ } ++ into.sort(); ++ into.dedup(); ++ } ++ ++ fn new(pats: &[Glob]) -> Result { ++ if pats.is_empty() { ++ return Ok(GlobSet { len: 0, strats: vec![] }); ++ } ++ let mut lits = LiteralStrategy::new(); ++ let mut base_lits = BasenameLiteralStrategy::new(); ++ let mut exts = ExtensionStrategy::new(); ++ let mut prefixes = MultiStrategyBuilder::new(); ++ let mut suffixes = MultiStrategyBuilder::new(); ++ let mut required_exts = RequiredExtensionStrategyBuilder::new(); ++ let mut regexes = MultiStrategyBuilder::new(); ++ for (i, p) in pats.iter().enumerate() { ++ match MatchStrategy::new(p) { ++ MatchStrategy::Literal(lit) => { ++ lits.add(i, lit); ++ } ++ MatchStrategy::BasenameLiteral(lit) => { ++ base_lits.add(i, lit); ++ } ++ MatchStrategy::Extension(ext) => { ++ exts.add(i, ext); ++ } ++ MatchStrategy::Prefix(prefix) => { ++ prefixes.add(i, prefix); ++ } ++ MatchStrategy::Suffix { suffix, component } => { ++ if component { ++ lits.add(i, suffix[1..].to_string()); ++ } ++ suffixes.add(i, suffix); ++ } ++ MatchStrategy::RequiredExtension(ext) => { ++ required_exts.add(i, ext, p.regex().to_owned()); ++ } ++ MatchStrategy::Regex => { ++ debug!("glob converted to regex: {:?}", p); ++ regexes.add(i, p.regex().to_owned()); ++ } ++ } ++ } ++ debug!("built glob set; {} literals, {} basenames, {} extensions, \ ++ {} prefixes, {} suffixes, {} required extensions, {} regexes", ++ lits.0.len(), base_lits.0.len(), exts.0.len(), ++ prefixes.literals.len(), suffixes.literals.len(), ++ required_exts.0.len(), regexes.literals.len()); ++ Ok(GlobSet { ++ len: pats.len(), ++ strats: vec![ ++ GlobSetMatchStrategy::Extension(exts), ++ GlobSetMatchStrategy::BasenameLiteral(base_lits), ++ GlobSetMatchStrategy::Literal(lits), ++ GlobSetMatchStrategy::Suffix(suffixes.suffix()), 
++ GlobSetMatchStrategy::Prefix(prefixes.prefix()), ++ GlobSetMatchStrategy::RequiredExtension( ++ try!(required_exts.build())), ++ GlobSetMatchStrategy::Regex(try!(regexes.regex_set())), ++ ], ++ }) ++ } ++} ++ ++/// GlobSetBuilder builds a group of patterns that can be used to ++/// simultaneously match a file path. ++pub struct GlobSetBuilder { ++ pats: Vec, ++} ++ ++impl GlobSetBuilder { ++ /// Create a new GlobSetBuilder. A GlobSetBuilder can be used to add new ++ /// patterns. Once all patterns have been added, `build` should be called ++ /// to produce a `GlobSet`, which can then be used for matching. ++ pub fn new() -> GlobSetBuilder { ++ GlobSetBuilder { pats: vec![] } ++ } ++ ++ /// Builds a new matcher from all of the glob patterns added so far. ++ /// ++ /// Once a matcher is built, no new patterns can be added to it. ++ pub fn build(&self) -> Result { ++ GlobSet::new(&self.pats) ++ } ++ ++ /// Add a new pattern to this set. ++ #[allow(dead_code)] ++ pub fn add(&mut self, pat: Glob) -> &mut GlobSetBuilder { ++ self.pats.push(pat); ++ self ++ } ++} ++ ++/// A candidate path for matching. ++/// ++/// All glob matching in this crate operates on `Candidate` values. ++/// Constructing candidates has a very small cost associated with it, so ++/// callers may find it beneficial to amortize that cost when matching a single ++/// path against multiple globs or sets of globs. ++#[derive(Clone, Debug)] ++pub struct Candidate<'a> { ++ path: Cow<'a, [u8]>, ++ basename: Cow<'a, [u8]>, ++ ext: &'a OsStr, ++} ++ ++impl<'a> Candidate<'a> { ++ /// Create a new candidate for matching from the given path. 
++ pub fn new + ?Sized>(path: &'a P) -> Candidate<'a> { ++ let path = path.as_ref(); ++ let basename = file_name(path).unwrap_or(OsStr::new("")); ++ Candidate { ++ path: normalize_path(path_bytes(path)), ++ basename: os_str_bytes(basename), ++ ext: file_name_ext(basename).unwrap_or(OsStr::new("")), ++ } ++ } ++ ++ fn path_prefix(&self, max: usize) -> &[u8] { ++ if self.path.len() <= max { ++ &*self.path ++ } else { ++ &self.path[..max] ++ } ++ } ++ ++ fn path_suffix(&self, max: usize) -> &[u8] { ++ if self.path.len() <= max { ++ &*self.path ++ } else { ++ &self.path[self.path.len() - max..] ++ } ++ } ++} ++ ++#[derive(Clone, Debug)] ++enum GlobSetMatchStrategy { ++ Literal(LiteralStrategy), ++ BasenameLiteral(BasenameLiteralStrategy), ++ Extension(ExtensionStrategy), ++ Prefix(PrefixStrategy), ++ Suffix(SuffixStrategy), ++ RequiredExtension(RequiredExtensionStrategy), ++ Regex(RegexSetStrategy), ++} ++ ++impl GlobSetMatchStrategy { ++ fn is_match(&self, candidate: &Candidate) -> bool { ++ use self::GlobSetMatchStrategy::*; ++ match *self { ++ Literal(ref s) => s.is_match(candidate), ++ BasenameLiteral(ref s) => s.is_match(candidate), ++ Extension(ref s) => s.is_match(candidate), ++ Prefix(ref s) => s.is_match(candidate), ++ Suffix(ref s) => s.is_match(candidate), ++ RequiredExtension(ref s) => s.is_match(candidate), ++ Regex(ref s) => s.is_match(candidate), ++ } ++ } ++ ++ fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { ++ use self::GlobSetMatchStrategy::*; ++ match *self { ++ Literal(ref s) => s.matches_into(candidate, matches), ++ BasenameLiteral(ref s) => s.matches_into(candidate, matches), ++ Extension(ref s) => s.matches_into(candidate, matches), ++ Prefix(ref s) => s.matches_into(candidate, matches), ++ Suffix(ref s) => s.matches_into(candidate, matches), ++ RequiredExtension(ref s) => s.matches_into(candidate, matches), ++ Regex(ref s) => s.matches_into(candidate, matches), ++ } ++ } ++} ++ ++#[derive(Clone, Debug)] ++struct 
LiteralStrategy(BTreeMap, Vec>); ++ ++impl LiteralStrategy { ++ fn new() -> LiteralStrategy { ++ LiteralStrategy(BTreeMap::new()) ++ } ++ ++ fn add(&mut self, global_index: usize, lit: String) { ++ self.0.entry(lit.into_bytes()).or_insert(vec![]).push(global_index); ++ } ++ ++ fn is_match(&self, candidate: &Candidate) -> bool { ++ self.0.contains_key(&*candidate.path) ++ } ++ ++ #[inline(never)] ++ fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { ++ if let Some(hits) = self.0.get(&*candidate.path) { ++ matches.extend(hits); ++ } ++ } ++} ++ ++#[derive(Clone, Debug)] ++struct BasenameLiteralStrategy(BTreeMap, Vec>); ++ ++impl BasenameLiteralStrategy { ++ fn new() -> BasenameLiteralStrategy { ++ BasenameLiteralStrategy(BTreeMap::new()) ++ } ++ ++ fn add(&mut self, global_index: usize, lit: String) { ++ self.0.entry(lit.into_bytes()).or_insert(vec![]).push(global_index); ++ } ++ ++ fn is_match(&self, candidate: &Candidate) -> bool { ++ if candidate.basename.is_empty() { ++ return false; ++ } ++ self.0.contains_key(&*candidate.basename) ++ } ++ ++ #[inline(never)] ++ fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { ++ if candidate.basename.is_empty() { ++ return; ++ } ++ if let Some(hits) = self.0.get(&*candidate.basename) { ++ matches.extend(hits); ++ } ++ } ++} ++ ++#[derive(Clone, Debug)] ++struct ExtensionStrategy(HashMap, Fnv>); ++ ++impl ExtensionStrategy { ++ fn new() -> ExtensionStrategy { ++ ExtensionStrategy(HashMap::with_hasher(Fnv::default())) ++ } ++ ++ fn add(&mut self, global_index: usize, ext: OsString) { ++ self.0.entry(ext).or_insert(vec![]).push(global_index); ++ } ++ ++ fn is_match(&self, candidate: &Candidate) -> bool { ++ if candidate.ext.is_empty() { ++ return false; ++ } ++ self.0.contains_key(candidate.ext) ++ } ++ ++ #[inline(never)] ++ fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { ++ if candidate.ext.is_empty() { ++ return; ++ } ++ if let Some(hits) = self.0.get(candidate.ext) { ++ 
matches.extend(hits); ++ } ++ } ++} ++ ++#[derive(Clone, Debug)] ++struct PrefixStrategy { ++ matcher: FullAcAutomaton>, ++ map: Vec, ++ longest: usize, ++} ++ ++impl PrefixStrategy { ++ fn is_match(&self, candidate: &Candidate) -> bool { ++ let path = candidate.path_prefix(self.longest); ++ for m in self.matcher.find_overlapping(path) { ++ if m.start == 0 { ++ return true; ++ } ++ } ++ false ++ } ++ ++ fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { ++ let path = candidate.path_prefix(self.longest); ++ for m in self.matcher.find_overlapping(path) { ++ if m.start == 0 { ++ matches.push(self.map[m.pati]); ++ } ++ } ++ } ++} ++ ++#[derive(Clone, Debug)] ++struct SuffixStrategy { ++ matcher: FullAcAutomaton>, ++ map: Vec, ++ longest: usize, ++} ++ ++impl SuffixStrategy { ++ fn is_match(&self, candidate: &Candidate) -> bool { ++ let path = candidate.path_suffix(self.longest); ++ for m in self.matcher.find_overlapping(path) { ++ if m.end == path.len() { ++ return true; ++ } ++ } ++ false ++ } ++ ++ fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { ++ let path = candidate.path_suffix(self.longest); ++ for m in self.matcher.find_overlapping(path) { ++ if m.end == path.len() { ++ matches.push(self.map[m.pati]); ++ } ++ } ++ } ++} ++ ++#[derive(Clone, Debug)] ++struct RequiredExtensionStrategy(HashMap, Fnv>); ++ ++impl RequiredExtensionStrategy { ++ fn is_match(&self, candidate: &Candidate) -> bool { ++ if candidate.ext.is_empty() { ++ return false; ++ } ++ match self.0.get(candidate.ext) { ++ None => false, ++ Some(regexes) => { ++ for &(_, ref re) in regexes { ++ if re.is_match(&*candidate.path) { ++ return true; ++ } ++ } ++ false ++ } ++ } ++ } ++ ++ #[inline(never)] ++ fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { ++ if candidate.ext.is_empty() { ++ return; ++ } ++ if let Some(regexes) = self.0.get(candidate.ext) { ++ for &(global_index, ref re) in regexes { ++ if re.is_match(&*candidate.path) { ++ 
matches.push(global_index); ++ } ++ } ++ } ++ } ++} ++ ++#[derive(Clone, Debug)] ++struct RegexSetStrategy { ++ matcher: RegexSet, ++ map: Vec, ++} ++ ++impl RegexSetStrategy { ++ fn is_match(&self, candidate: &Candidate) -> bool { ++ self.matcher.is_match(&*candidate.path) ++ } ++ ++ fn matches_into(&self, candidate: &Candidate, matches: &mut Vec) { ++ for i in self.matcher.matches(&*candidate.path) { ++ matches.push(self.map[i]); ++ } ++ } ++} ++ ++#[derive(Clone, Debug)] ++struct MultiStrategyBuilder { ++ literals: Vec, ++ map: Vec, ++ longest: usize, ++} ++ ++impl MultiStrategyBuilder { ++ fn new() -> MultiStrategyBuilder { ++ MultiStrategyBuilder { ++ literals: vec![], ++ map: vec![], ++ longest: 0, ++ } ++ } ++ ++ fn add(&mut self, global_index: usize, literal: String) { ++ if literal.len() > self.longest { ++ self.longest = literal.len(); ++ } ++ self.map.push(global_index); ++ self.literals.push(literal); ++ } ++ ++ fn prefix(self) -> PrefixStrategy { ++ let it = self.literals.into_iter().map(|s| s.into_bytes()); ++ PrefixStrategy { ++ matcher: AcAutomaton::new(it).into_full(), ++ map: self.map, ++ longest: self.longest, ++ } ++ } ++ ++ fn suffix(self) -> SuffixStrategy { ++ let it = self.literals.into_iter().map(|s| s.into_bytes()); ++ SuffixStrategy { ++ matcher: AcAutomaton::new(it).into_full(), ++ map: self.map, ++ longest: self.longest, ++ } ++ } ++ ++ fn regex_set(self) -> Result { ++ Ok(RegexSetStrategy { ++ matcher: try!(new_regex_set(self.literals)), ++ map: self.map, ++ }) ++ } ++} ++ ++#[derive(Clone, Debug)] ++struct RequiredExtensionStrategyBuilder( ++ HashMap>, ++); ++ ++impl RequiredExtensionStrategyBuilder { ++ fn new() -> RequiredExtensionStrategyBuilder { ++ RequiredExtensionStrategyBuilder(HashMap::new()) ++ } ++ ++ fn add(&mut self, global_index: usize, ext: OsString, regex: String) { ++ self.0.entry(ext).or_insert(vec![]).push((global_index, regex)); ++ } ++ ++ fn build(self) -> Result { ++ let mut exts = 
HashMap::with_hasher(Fnv::default()); ++ for (ext, regexes) in self.0.into_iter() { ++ exts.insert(ext.clone(), vec![]); ++ for (global_index, regex) in regexes { ++ let compiled = try!(new_regex(®ex)); ++ exts.get_mut(&ext).unwrap().push((global_index, compiled)); ++ } ++ } ++ Ok(RequiredExtensionStrategy(exts)) ++ } ++} ++ ++#[cfg(test)] ++mod tests { ++ use super::GlobSetBuilder; ++ use glob::Glob; ++ ++ #[test] ++ fn set_works() { ++ let mut builder = GlobSetBuilder::new(); ++ builder.add(Glob::new("src/**/*.rs").unwrap()); ++ builder.add(Glob::new("*.c").unwrap()); ++ builder.add(Glob::new("src/lib.rs").unwrap()); ++ let set = builder.build().unwrap(); ++ ++ assert!(set.is_match("foo.c")); ++ assert!(set.is_match("src/foo.c")); ++ assert!(!set.is_match("foo.rs")); ++ assert!(!set.is_match("tests/foo.rs")); ++ assert!(set.is_match("src/foo.rs")); ++ assert!(set.is_match("src/grep/src/main.rs")); ++ ++ let matches = set.matches("src/lib.rs"); ++ assert_eq!(2, matches.len()); ++ assert_eq!(0, matches[0]); ++ assert_eq!(2, matches[1]); ++ } ++ ++ #[test] ++ fn empty_set_works() { ++ let set = GlobSetBuilder::new().build().unwrap(); ++ assert!(!set.is_match("")); ++ assert!(!set.is_match("a")); ++ } ++} diff --cc vendor/globset-0.2.0/src/pathutil.rs index 000000000,000000000..16bd16fc7 new file mode 100644 --- /dev/null +++ b/vendor/globset-0.2.0/src/pathutil.rs @@@ -1,0 -1,0 +1,178 @@@ ++use std::borrow::Cow; ++use std::ffi::OsStr; ++use std::path::Path; ++ ++/// The final component of the path, if it is a normal file. ++/// ++/// If the path terminates in ., .., or consists solely of a root of prefix, ++/// file_name will return None. ++#[cfg(unix)] ++pub fn file_name<'a, P: AsRef + ?Sized>( ++ path: &'a P, ++) -> Option<&'a OsStr> { ++ use std::os::unix::ffi::OsStrExt; ++ use memchr::memrchr; ++ ++ let path = path.as_ref().as_os_str().as_bytes(); ++ if path.is_empty() { ++ return None; ++ } else if path.len() == 1 && path[0] == b'.' 
{ ++ return None; ++ } else if path.last() == Some(&b'.') { ++ return None; ++ } else if path.len() >= 2 && &path[path.len() - 2..] == &b".."[..] { ++ return None; ++ } ++ let last_slash = memrchr(b'/', path).map(|i| i + 1).unwrap_or(0); ++ Some(OsStr::from_bytes(&path[last_slash..])) ++} ++ ++/// The final component of the path, if it is a normal file. ++/// ++/// If the path terminates in ., .., or consists solely of a root of prefix, ++/// file_name will return None. ++#[cfg(not(unix))] ++pub fn file_name<'a, P: AsRef + ?Sized>( ++ path: &'a P, ++) -> Option<&'a OsStr> { ++ path.as_ref().file_name() ++} ++ ++/// Return a file extension given a path's file name. ++/// ++/// Note that this does NOT match the semantics of std::path::Path::extension. ++/// Namely, the extension includes the `.` and matching is otherwise more ++/// liberal. Specifically, the extenion is: ++/// ++/// * None, if the file name given is empty; ++/// * None, if there is no embedded `.`; ++/// * Otherwise, the portion of the file name starting with the final `.`. ++/// ++/// e.g., A file name of `.rs` has an extension `.rs`. ++/// ++/// N.B. This is done to make certain glob match optimizations easier. Namely, ++/// a pattern like `*.rs` is obviously trying to match files with a `rs` ++/// extension, but it also matches files like `.rs`, which doesn't have an ++/// extension according to std::path::Path::extension. ++pub fn file_name_ext(name: &OsStr) -> Option<&OsStr> { ++ // Yes, these functions are awful, and yes, we are completely violating ++ // the abstraction barrier of std::ffi. The barrier we're violating is ++ // that an OsStr's encoding is *ASCII compatible*. While this is obviously ++ // true on Unix systems, it's also true on Windows because an OsStr uses ++ // WTF-8 internally: https://simonsapin.github.io/wtf-8/ ++ // ++ // We should consider doing the same for the other path utility functions. 
++ // Right now, we don't break any barriers, but Windows users are paying ++ // for it. ++ // ++ // Got any better ideas that don't cost anything? Hit me up. ---AG ++ unsafe fn os_str_as_u8_slice(s: &OsStr) -> &[u8] { ++ ::std::mem::transmute(s) ++ } ++ unsafe fn u8_slice_as_os_str(s: &[u8]) -> &OsStr { ++ ::std::mem::transmute(s) ++ } ++ if name.is_empty() { ++ return None; ++ } ++ let name = unsafe { os_str_as_u8_slice(name) }; ++ for (i, &b) in name.iter().enumerate().rev() { ++ if b == b'.' { ++ return Some(unsafe { u8_slice_as_os_str(&name[i..]) }); ++ } ++ } ++ None ++} ++ ++/// Return raw bytes of a path, transcoded to UTF-8 if necessary. ++pub fn path_bytes(path: &Path) -> Cow<[u8]> { ++ os_str_bytes(path.as_os_str()) ++} ++ ++/// Return the raw bytes of the given OS string, possibly transcoded to UTF-8. ++#[cfg(unix)] ++pub fn os_str_bytes(s: &OsStr) -> Cow<[u8]> { ++ use std::os::unix::ffi::OsStrExt; ++ Cow::Borrowed(s.as_bytes()) ++} ++ ++/// Return the raw bytes of the given OS string, possibly transcoded to UTF-8. ++#[cfg(not(unix))] ++pub fn os_str_bytes(s: &OsStr) -> Cow<[u8]> { ++ // TODO(burntsushi): On Windows, OS strings are WTF-8, which is a superset ++ // of UTF-8, so even if we could get at the raw bytes, they wouldn't ++ // be useful. We *must* convert to UTF-8 before doing path matching. ++ // Unfortunate, but necessary. ++ match s.to_string_lossy() { ++ Cow::Owned(s) => Cow::Owned(s.into_bytes()), ++ Cow::Borrowed(s) => Cow::Borrowed(s.as_bytes()), ++ } ++} ++ ++/// Normalizes a path to use `/` as a separator everywhere, even on platforms ++/// that recognize other characters as separators. ++#[cfg(unix)] ++pub fn normalize_path(path: Cow<[u8]>) -> Cow<[u8]> { ++ // UNIX only uses /, so we're good. ++ path ++} ++ ++/// Normalizes a path to use `/` as a separator everywhere, even on platforms ++/// that recognize other characters as separators. 
++#[cfg(not(unix))] ++pub fn normalize_path(mut path: Cow<[u8]>) -> Cow<[u8]> { ++ use std::path::is_separator; ++ ++ for i in 0..path.len() { ++ if path[i] == b'/' || !is_separator(path[i] as char) { ++ continue; ++ } ++ path.to_mut()[i] = b'/'; ++ } ++ path ++} ++ ++#[cfg(test)] ++mod tests { ++ use std::borrow::Cow; ++ use std::ffi::OsStr; ++ ++ use super::{file_name_ext, normalize_path}; ++ ++ macro_rules! ext { ++ ($name:ident, $file_name:expr, $ext:expr) => { ++ #[test] ++ fn $name() { ++ let got = file_name_ext(OsStr::new($file_name)); ++ assert_eq!($ext.map(OsStr::new), got); ++ } ++ }; ++ } ++ ++ ext!(ext1, "foo.rs", Some(".rs")); ++ ext!(ext2, ".rs", Some(".rs")); ++ ext!(ext3, "..rs", Some(".rs")); ++ ext!(ext4, "", None::<&str>); ++ ext!(ext5, "foo", None::<&str>); ++ ++ macro_rules! normalize { ++ ($name:ident, $path:expr, $expected:expr) => { ++ #[test] ++ fn $name() { ++ let got = normalize_path(Cow::Owned($path.to_vec())); ++ assert_eq!($expected.to_vec(), got.into_owned()); ++ } ++ }; ++ } ++ ++ normalize!(normal1, b"foo", b"foo"); ++ normalize!(normal2, b"foo/bar", b"foo/bar"); ++ #[cfg(unix)] ++ normalize!(normal3, b"foo\\bar", b"foo\\bar"); ++ #[cfg(not(unix))] ++ normalize!(normal3, b"foo\\bar", b"foo/bar"); ++ #[cfg(unix)] ++ normalize!(normal4, b"foo\\bar/baz", b"foo\\bar/baz"); ++ #[cfg(not(unix))] ++ normalize!(normal4, b"foo\\bar/baz", b"foo/bar/baz"); ++} diff --cc vendor/home-0.3.0/.cargo-checksum.json index 000000000,000000000..21a188d4e new file mode 100644 --- /dev/null +++ b/vendor/home-0.3.0/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"9f25ae61099d8f3fee8b483df0bd4ecccf4b2731897aad40d50eca1b641fe6db"} diff --cc vendor/home-0.3.0/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/home-0.3.0/.cargo-ok diff --cc vendor/home-0.3.0/Cargo.toml index 000000000,000000000..15db51ff2 new file mode 100644 --- /dev/null +++ b/vendor/home-0.3.0/Cargo.toml @@@ -1,0 -1,0 +1,34 @@@ 
++# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO ++# ++# When uploading crates to the registry Cargo will automatically ++# "normalize" Cargo.toml files for maximal compatibility ++# with all versions of Cargo and also rewrite `path` dependencies ++# to registry (e.g. crates.io) dependencies ++# ++# If you believe there's an error in this file please file an ++# issue against the rust-lang/cargo repository. If you're ++# editing this file be aware that the upstream Cargo.toml ++# will likely look very different (and much more reasonable) ++ ++[package] ++name = "home" ++version = "0.3.0" ++authors = ["Brian Anderson "] ++description = "Shared definitions of home directories" ++documentation = "https://docs.rs/home" ++license = "MIT/Apache-2.0" ++repository = "https://github.com/brson/home" ++[target."cfg(windows)".dependencies.winapi] ++version = "0.2.8" ++ ++[target."cfg(windows)".dependencies.kernel32-sys] ++version = "0.2.1" ++ ++[target."cfg(windows)".dependencies.scopeguard] ++version = "0.1.2" ++ ++[target."cfg(windows)".dependencies.userenv-sys] ++version = "0.2.0" ++ ++[target."cfg(windows)".dependencies.advapi32-sys] ++version = "0.2.0" diff --cc vendor/home-0.3.0/README.md index 000000000,000000000..2ff22423e new file mode 100644 --- /dev/null +++ b/vendor/home-0.3.0/README.md @@@ -1,0 -1,0 +1,24 @@@ ++Canonical definitions of `home_dir`, `cargo_home`, and `rustup_home`. ++ ++This provides the definition of `home_dir` used by Cargo and rustup, ++as well functions to find the correct value of `CARGO_HOME` and ++`RUSTUP_HOME`. ++ ++The definition of `home_dir` provided by the standard library is ++incorrect because it considers the `HOME` environment variable on ++Windows. This causes surprising situations where a Rust program will ++behave differently depending on whether it is run under a Unix ++emulation environment like Cygwin or MinGW. Neither Cargo nor rustup ++use the standard libraries definition - they use the definition here. 
++ ++This crate further provides two functions, `cargo_home` and ++`rustup_home`, which are the canonical way to determine the location ++that Cargo and rustup store their data. ++ ++See [rust-lang/rust#43321]. ++ ++[rust-lang/rust#43321]: https://github.com/rust-lang/rust/issues/43321 ++ ++## License ++ ++MIT/Apache-2.0 diff --cc vendor/home-0.3.0/src/lib.rs index 000000000,000000000..0917964b0 new file mode 100644 --- /dev/null +++ b/vendor/home-0.3.0/src/lib.rs @@@ -1,0 -1,0 +1,302 @@@ ++/// Canonical definitions of `home_dir`, `cargo_home`, and `rustup_home`. ++/// ++/// This provides the definition of `home_dir` used by Cargo and ++/// rustup, as well functions to find the correct value of ++/// `CARGO_HOME` and `RUSTUP_HOME`. ++/// ++/// The definition of `home_dir` provided by the standard library is ++/// incorrect because it considers the `HOME` environment variable on ++/// Windows. This causes surprising situations where a Rust program ++/// will behave differently depending on whether it is run under a ++/// Unix emulation environment like Cygwin or MinGW. Neither Cargo nor ++/// rustup use the standard libraries definition - they use the ++/// definition here. ++/// ++/// This crate further provides two functions, `cargo_home` and ++/// `rustup_home`, which are the canonical way to determine the ++/// location that Cargo and rustup store their data. ++/// ++/// See [rust-lang/rust#43321]. ++/// ++/// [rust-lang/rust#43321]: https://github.com/rust-lang/rust/issues/43321 ++ ++#[cfg(windows)] ++extern crate scopeguard; ++#[cfg(windows)] ++extern crate winapi; ++#[cfg(windows)] ++extern crate kernel32; ++#[cfg(windows)] ++extern crate advapi32; ++#[cfg(windows)] ++extern crate userenv; ++ ++#[cfg(windows)] ++use winapi::DWORD; ++use std::path::{PathBuf, Path}; ++use std::io; ++use std::env; ++ ++/// Returns the path of the current user's home directory if known. 
++/// ++/// # Unix ++/// ++/// Returns the value of the 'HOME' environment variable if it is set ++/// and not equal to the empty string. Otherwise, it tries to determine the ++/// home directory by invoking the `getpwuid_r` function on the UID of the ++/// current user. ++/// ++/// # Windows ++/// ++/// Returns the value of the 'USERPROFILE' environment variable if it ++/// is set and not equal to the empty string. If both do not exist, ++/// [`GetUserProfileDirectory`][msdn] is used to return the ++/// appropriate path. ++/// ++/// [msdn]: https://msdn.microsoft.com/en-us/library/windows/desktop/bb762280(v=vs.85).aspx ++/// ++/// # Examples ++/// ++/// ``` ++/// use std::env; ++/// ++/// match env::home_dir() { ++/// Some(path) => println!("{}", path.display()), ++/// None => println!("Impossible to get your home dir!"), ++/// } ++/// ``` ++pub fn home_dir() -> Option { ++ home_dir_() ++} ++ ++#[cfg(windows)] ++fn home_dir_() -> Option { ++ use std::ptr; ++ use kernel32::{GetCurrentProcess, GetLastError, CloseHandle}; ++ use advapi32::OpenProcessToken; ++ use userenv::GetUserProfileDirectoryW; ++ use winapi::ERROR_INSUFFICIENT_BUFFER; ++ use winapi::winnt::TOKEN_READ; ++ use scopeguard; ++ ++ ::std::env::var_os("USERPROFILE").map(PathBuf::from).or_else(|| unsafe { ++ let me = GetCurrentProcess(); ++ let mut token = ptr::null_mut(); ++ if OpenProcessToken(me, TOKEN_READ, &mut token) == 0 { ++ return None; ++ } ++ let _g = scopeguard::guard(token, |h| { let _ = CloseHandle(*h); }); ++ fill_utf16_buf(|buf, mut sz| { ++ match GetUserProfileDirectoryW(token, buf, &mut sz) { ++ 0 if GetLastError() != ERROR_INSUFFICIENT_BUFFER => 0, ++ 0 => sz, ++ _ => sz - 1, // sz includes the null terminator ++ } ++ }, os2path).ok() ++ }) ++} ++ ++#[cfg(windows)] ++fn os2path(s: &[u16]) -> PathBuf { ++ use std::ffi::OsString; ++ use std::os::windows::ffi::OsStringExt; ++ PathBuf::from(OsString::from_wide(s)) ++} ++ ++#[cfg(windows)] ++fn fill_utf16_buf(mut f1: F1, f2: F2) -> 
io::Result ++ where F1: FnMut(*mut u16, DWORD) -> DWORD, ++ F2: FnOnce(&[u16]) -> T ++{ ++ use kernel32::{GetLastError, SetLastError}; ++ use winapi::{ERROR_INSUFFICIENT_BUFFER}; ++ ++ // Start off with a stack buf but then spill over to the heap if we end up ++ // needing more space. ++ let mut stack_buf = [0u16; 512]; ++ let mut heap_buf = Vec::new(); ++ unsafe { ++ let mut n = stack_buf.len(); ++ loop { ++ let buf = if n <= stack_buf.len() { ++ &mut stack_buf[..] ++ } else { ++ let extra = n - heap_buf.len(); ++ heap_buf.reserve(extra); ++ heap_buf.set_len(n); ++ &mut heap_buf[..] ++ }; ++ ++ // This function is typically called on windows API functions which ++ // will return the correct length of the string, but these functions ++ // also return the `0` on error. In some cases, however, the ++ // returned "correct length" may actually be 0! ++ // ++ // To handle this case we call `SetLastError` to reset it to 0 and ++ // then check it again if we get the "0 error value". If the "last ++ // error" is still 0 then we interpret it as a 0 length buffer and ++ // not an actual error. ++ SetLastError(0); ++ let k = match f1(buf.as_mut_ptr(), n as DWORD) { ++ 0 if GetLastError() == 0 => 0, ++ 0 => return Err(io::Error::last_os_error()), ++ n => n, ++ } as usize; ++ if k == n && GetLastError() == ERROR_INSUFFICIENT_BUFFER { ++ n *= 2; ++ } else if k >= n { ++ n = k; ++ } else { ++ return Ok(f2(&buf[..k])) ++ } ++ } ++ } ++} ++ ++#[cfg(unix)] ++fn home_dir_() -> Option { ++ ::std::env::home_dir() ++} ++ ++/// Returns the storage directory used by Cargo, often knowns as ++/// `.cargo` or `CARGO_HOME`. ++/// ++/// It returns one of the following values, in this order of ++/// preference: ++/// ++/// - The value of the `CARGO_HOME` environment variable, if it is ++/// an absolute path. ++/// - The value of the current working directory joined with the value ++/// of the `CARGO_HOME` environment variable, if `CARGO_HOME` is a ++/// relative directory. 
++/// - The `.cargo` directory in the user's home directory, as reported ++/// by the `home_dir` function. ++/// ++/// # Errors ++/// ++/// This function fails if it fails to retrieve the current directory, ++/// or if the home directory cannot be determined. ++pub fn cargo_home() -> io::Result { ++ let cwd = env::current_dir()?; ++ cargo_home_with_cwd(&cwd) ++} ++ ++pub fn cargo_home_with_cwd(cwd: &Path) -> io::Result { ++ let env_var = env::var_os("CARGO_HOME"); ++ ++ // NB: During the multirust-rs -> rustup transition the install ++ // dir changed from ~/.multirust/bin to ~/.cargo/bin. Because ++ // multirust used to explicitly set CARGO_HOME it's possible to ++ // get here when e.g. installing under `cargo run` and decide to ++ // install to the wrong place. This check is to make the ++ // multirust-rs to rustup upgrade seamless. ++ let env_var = if let Some(v) = env_var { ++ let vv = v.to_string_lossy().to_string(); ++ if vv.contains(".multirust/cargo") || ++ vv.contains(r".multirust\cargo") || ++ vv.trim().is_empty() { ++ None ++ } else { ++ Some(v) ++ } ++ } else { ++ None ++ }; ++ ++ let env_cargo_home = env_var.map(|home| cwd.join(home)); ++ let home_dir = home_dir() ++ .ok_or(io::Error::new(io::ErrorKind::Other, "couldn't find home dir")); ++ let user_home = home_dir.map(|p| p.join(".cargo")); ++ ++ // Compatibility with old cargo that used the std definition of home_dir ++ let compat_home_dir = ::std::env::home_dir(); ++ let compat_user_home = compat_home_dir.map(|p| p.join(".cargo")); ++ ++ if let Some(p) = env_cargo_home { ++ Ok(p) ++ } else { ++ if let Some(d) = compat_user_home { ++ if d.exists() { ++ Ok(d) ++ } else { ++ user_home ++ } ++ } else { ++ user_home ++ } ++ } ++} ++ ++/// Returns the storage directory used by rustup, often knowns as ++/// `.rustup` or `RUSTUP_HOME`. 
++/// ++/// It returns one of the following values, in this order of ++/// preference: ++/// ++/// - The value of the `RUSTUP_HOME` environment variable, if it is ++/// an absolute path. ++/// - The value of the current working directory joined with the value ++/// of the `RUSTUP_HOME` environment variable, if `RUSTUP_HOME` is a ++/// relative directory. ++/// - The `.rustup` directory in the user's home directory, as reported ++/// by the `home_dir` function. ++/// ++/// As a matter of backwards compatibility, this function _may_ return ++/// the `.multirust` directory in the user's home directory, only if ++/// it determines that the user is running an old version of rustup ++/// where that is necessary. ++/// ++/// # Errors ++/// ++/// This function fails if it fails to retrieve the current directory, ++/// or if the home directory cannot be determined. ++pub fn rustup_home() -> io::Result { ++ let cwd = env::current_dir()?; ++ rustup_home_with_cwd(&cwd) ++} ++ ++pub fn rustup_home_with_cwd(cwd: &Path) -> io::Result { ++ let env_var = env::var_os("RUSTUP_HOME"); ++ let env_rustup_home = env_var.map(|home| cwd.join(home)); ++ let home_dir = home_dir() ++ .ok_or(io::Error::new(io::ErrorKind::Other, "couldn't find home dir")); ++ ++ let user_home = if use_rustup_dir() { ++ home_dir.map(|d| d.join(".rustup")) ++ } else { ++ home_dir.map(|d| d.join(".multirust")) ++ }; ++ ++ if let Some(p) = env_rustup_home { ++ Ok(p) ++ } else { ++ user_home ++ } ++} ++ ++fn use_rustup_dir() -> bool { ++ fn rustup_dir() -> Option { ++ home_dir().map(|p| p.join(".rustup")) ++ } ++ ++ fn multirust_dir() -> Option { ++ home_dir().map(|p| p.join(".multirust")) ++ } ++ ++ fn rustup_dir_exists() -> bool { ++ rustup_dir().map(|p| p.exists()).unwrap_or(false) ++ } ++ ++ fn multirust_dir_exists() -> bool { ++ multirust_dir().map(|p| p.exists()).unwrap_or(false) ++ } ++ ++ fn rustup_old_version_exists() -> bool { ++ rustup_dir() ++ .map(|p| p.join("rustup-version").exists()) ++ 
.unwrap_or(false) ++ } ++ ++ !rustup_old_version_exists() ++ && (rustup_dir_exists() || !multirust_dir_exists()) ++} diff --cc vendor/idna-0.1.4/.cargo-checksum.json index 000000000,000000000..195950967 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"} diff --cc vendor/idna-0.1.4/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/.cargo-ok diff --cc vendor/idna-0.1.4/Cargo.toml index 000000000,000000000..8405be06d new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/Cargo.toml @@@ -1,0 -1,0 +1,27 @@@ ++[package] ++name = "idna" ++version = "0.1.4" ++authors = ["The rust-url developers"] ++description = "IDNA (Internationalizing Domain Names in Applications) and Punycode." ++repository = "https://github.com/servo/rust-url/" ++license = "MIT/Apache-2.0" ++ ++[lib] ++doctest = false ++test = false ++ ++[[test]] ++name = "tests" ++harness = false ++ ++[[test]] ++name = "unit" ++ ++[dev-dependencies] ++rustc-test = "0.2" ++rustc-serialize = "0.3" ++ ++[dependencies] ++unicode-bidi = "0.3" ++unicode-normalization = "0.1.5" ++matches = "0.1" diff --cc vendor/idna-0.1.4/LICENSE-APACHE index 000000000,000000000..16fe87b06 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/LICENSE-APACHE @@@ -1,0 -1,0 +1,201 @@@ ++ Apache License ++ Version 2.0, January 2004 ++ http://www.apache.org/licenses/ ++ ++TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION ++ ++1. Definitions. ++ ++ "License" shall mean the terms and conditions for use, reproduction, ++ and distribution as defined by Sections 1 through 9 of this document. ++ ++ "Licensor" shall mean the copyright owner or entity authorized by ++ the copyright owner that is granting the License. 
++ ++ "Legal Entity" shall mean the union of the acting entity and all ++ other entities that control, are controlled by, or are under common ++ control with that entity. For the purposes of this definition, ++ "control" means (i) the power, direct or indirect, to cause the ++ direction or management of such entity, whether by contract or ++ otherwise, or (ii) ownership of fifty percent (50%) or more of the ++ outstanding shares, or (iii) beneficial ownership of such entity. ++ ++ "You" (or "Your") shall mean an individual or Legal Entity ++ exercising permissions granted by this License. ++ ++ "Source" form shall mean the preferred form for making modifications, ++ including but not limited to software source code, documentation ++ source, and configuration files. ++ ++ "Object" form shall mean any form resulting from mechanical ++ transformation or translation of a Source form, including but ++ not limited to compiled object code, generated documentation, ++ and conversions to other media types. ++ ++ "Work" shall mean the work of authorship, whether in Source or ++ Object form, made available under the License, as indicated by a ++ copyright notice that is included in or attached to the work ++ (an example is provided in the Appendix below). ++ ++ "Derivative Works" shall mean any work, whether in Source or Object ++ form, that is based on (or derived from) the Work and for which the ++ editorial revisions, annotations, elaborations, or other modifications ++ represent, as a whole, an original work of authorship. For the purposes ++ of this License, Derivative Works shall not include works that remain ++ separable from, or merely link (or bind by name) to the interfaces of, ++ the Work and Derivative Works thereof. 
++ ++ "Contribution" shall mean any work of authorship, including ++ the original version of the Work and any modifications or additions ++ to that Work or Derivative Works thereof, that is intentionally ++ submitted to Licensor for inclusion in the Work by the copyright owner ++ or by an individual or Legal Entity authorized to submit on behalf of ++ the copyright owner. For the purposes of this definition, "submitted" ++ means any form of electronic, verbal, or written communication sent ++ to the Licensor or its representatives, including but not limited to ++ communication on electronic mailing lists, source code control systems, ++ and issue tracking systems that are managed by, or on behalf of, the ++ Licensor for the purpose of discussing and improving the Work, but ++ excluding communication that is conspicuously marked or otherwise ++ designated in writing by the copyright owner as "Not a Contribution." ++ ++ "Contributor" shall mean Licensor and any individual or Legal Entity ++ on behalf of whom a Contribution has been received by Licensor and ++ subsequently incorporated within the Work. ++ ++2. Grant of Copyright License. Subject to the terms and conditions of ++ this License, each Contributor hereby grants to You a perpetual, ++ worldwide, non-exclusive, no-charge, royalty-free, irrevocable ++ copyright license to reproduce, prepare Derivative Works of, ++ publicly display, publicly perform, sublicense, and distribute the ++ Work and such Derivative Works in Source or Object form. ++ ++3. Grant of Patent License. 
Subject to the terms and conditions of ++ this License, each Contributor hereby grants to You a perpetual, ++ worldwide, non-exclusive, no-charge, royalty-free, irrevocable ++ (except as stated in this section) patent license to make, have made, ++ use, offer to sell, sell, import, and otherwise transfer the Work, ++ where such license applies only to those patent claims licensable ++ by such Contributor that are necessarily infringed by their ++ Contribution(s) alone or by combination of their Contribution(s) ++ with the Work to which such Contribution(s) was submitted. If You ++ institute patent litigation against any entity (including a ++ cross-claim or counterclaim in a lawsuit) alleging that the Work ++ or a Contribution incorporated within the Work constitutes direct ++ or contributory patent infringement, then any patent licenses ++ granted to You under this License for that Work shall terminate ++ as of the date such litigation is filed. ++ ++4. Redistribution. You may reproduce and distribute copies of the ++ Work or Derivative Works thereof in any medium, with or without ++ modifications, and in Source or Object form, provided that You ++ meet the following conditions: ++ ++ (a) You must give any other recipients of the Work or ++ Derivative Works a copy of this License; and ++ ++ (b) You must cause any modified files to carry prominent notices ++ stating that You changed the files; and ++ ++ (c) You must retain, in the Source form of any Derivative Works ++ that You distribute, all copyright, patent, trademark, and ++ attribution notices from the Source form of the Work, ++ excluding those notices that do not pertain to any part of ++ the Derivative Works; and ++ ++ (d) If the Work includes a "NOTICE" text file as part of its ++ distribution, then any Derivative Works that You distribute must ++ include a readable copy of the attribution notices contained ++ within such NOTICE file, excluding those notices that do not ++ pertain to any part of the 
Derivative Works, in at least one ++ of the following places: within a NOTICE text file distributed ++ as part of the Derivative Works; within the Source form or ++ documentation, if provided along with the Derivative Works; or, ++ within a display generated by the Derivative Works, if and ++ wherever such third-party notices normally appear. The contents ++ of the NOTICE file are for informational purposes only and ++ do not modify the License. You may add Your own attribution ++ notices within Derivative Works that You distribute, alongside ++ or as an addendum to the NOTICE text from the Work, provided ++ that such additional attribution notices cannot be construed ++ as modifying the License. ++ ++ You may add Your own copyright statement to Your modifications and ++ may provide additional or different license terms and conditions ++ for use, reproduction, or distribution of Your modifications, or ++ for any such Derivative Works as a whole, provided Your use, ++ reproduction, and distribution of the Work otherwise complies with ++ the conditions stated in this License. ++ ++5. Submission of Contributions. Unless You explicitly state otherwise, ++ any Contribution intentionally submitted for inclusion in the Work ++ by You to the Licensor shall be under the terms and conditions of ++ this License, without any additional terms or conditions. ++ Notwithstanding the above, nothing herein shall supersede or modify ++ the terms of any separate license agreement you may have executed ++ with Licensor regarding such Contributions. ++ ++6. Trademarks. This License does not grant permission to use the trade ++ names, trademarks, service marks, or product names of the Licensor, ++ except as required for reasonable and customary use in describing the ++ origin of the Work and reproducing the content of the NOTICE file. ++ ++7. Disclaimer of Warranty. 
Unless required by applicable law or ++ agreed to in writing, Licensor provides the Work (and each ++ Contributor provides its Contributions) on an "AS IS" BASIS, ++ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or ++ implied, including, without limitation, any warranties or conditions ++ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A ++ PARTICULAR PURPOSE. You are solely responsible for determining the ++ appropriateness of using or redistributing the Work and assume any ++ risks associated with Your exercise of permissions under this License. ++ ++8. Limitation of Liability. In no event and under no legal theory, ++ whether in tort (including negligence), contract, or otherwise, ++ unless required by applicable law (such as deliberate and grossly ++ negligent acts) or agreed to in writing, shall any Contributor be ++ liable to You for damages, including any direct, indirect, special, ++ incidental, or consequential damages of any character arising as a ++ result of this License or out of the use or inability to use the ++ Work (including but not limited to damages for loss of goodwill, ++ work stoppage, computer failure or malfunction, or any and all ++ other commercial damages or losses), even if such Contributor ++ has been advised of the possibility of such damages. ++ ++9. Accepting Warranty or Additional Liability. While redistributing ++ the Work or Derivative Works thereof, You may choose to offer, ++ and charge a fee for, acceptance of support, warranty, indemnity, ++ or other liability obligations and/or rights consistent with this ++ License. 
However, in accepting such obligations, You may act only ++ on Your own behalf and on Your sole responsibility, not on behalf ++ of any other Contributor, and only if You agree to indemnify, ++ defend, and hold each Contributor harmless for any liability ++ incurred by, or claims asserted against, such Contributor by reason ++ of your accepting any such warranty or additional liability. ++ ++END OF TERMS AND CONDITIONS ++ ++APPENDIX: How to apply the Apache License to your work. ++ ++ To apply the Apache License to your work, attach the following ++ boilerplate notice, with the fields enclosed by brackets "[]" ++ replaced with your own identifying information. (Don't include ++ the brackets!) The text should be enclosed in the appropriate ++ comment syntax for the file format. We also recommend that a ++ file or class name and description of purpose be included on the ++ same "printed page" as the copyright notice for easier ++ identification within third-party archives. ++ ++Copyright [yyyy] [name of copyright owner] ++ ++Licensed under the Apache License, Version 2.0 (the "License"); ++you may not use this file except in compliance with the License. ++You may obtain a copy of the License at ++ ++ http://www.apache.org/licenses/LICENSE-2.0 ++ ++Unless required by applicable law or agreed to in writing, software ++distributed under the License is distributed on an "AS IS" BASIS, ++WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++See the License for the specific language governing permissions and ++limitations under the License. 
diff --cc vendor/idna-0.1.4/LICENSE-MIT index 000000000,000000000..24de6b418 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/LICENSE-MIT @@@ -1,0 -1,0 +1,25 @@@ ++Copyright (c) 2013-2016 The rust-url developers ++ ++Permission is hereby granted, free of charge, to any ++person obtaining a copy of this software and associated ++documentation files (the "Software"), to deal in the ++Software without restriction, including without ++limitation the rights to use, copy, modify, merge, ++publish, distribute, sublicense, and/or sell copies of ++the Software, and to permit persons to whom the Software ++is furnished to do so, subject to the following ++conditions: ++ ++The above copyright notice and this permission notice ++shall be included in all copies or substantial portions ++of the Software. ++ ++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ++ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED ++TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A ++PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT ++SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY ++CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION ++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR ++IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER ++DEALINGS IN THE SOFTWARE. diff --cc vendor/idna-0.1.4/src/IdnaMappingTable.txt index 000000000,000000000..295606447 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/src/IdnaMappingTable.txt @@@ -1,0 -1,0 +1,8405 @@@ ++# IdnaMappingTable-10.0.0.txt ++# Date: 2017-02-23, 14:18:32 GMT ++# © 2017 Unicode®, Inc. ++# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. 
++# For terms of use, see http://www.unicode.org/terms_of_use.html ++# ++# Unicode IDNA Compatible Preprocessing (UTS #46) ++# For documentation, see http://www.unicode.org/reports/tr46/ ++ ++0000..002C ; disallowed_STD3_valid # 1.1 ..COMMA ++002D..002E ; valid # 1.1 HYPHEN-MINUS..FULL STOP ++002F ; disallowed_STD3_valid # 1.1 SOLIDUS ++0030..0039 ; valid # 1.1 DIGIT ZERO..DIGIT NINE ++003A..0040 ; disallowed_STD3_valid # 1.1 COLON..COMMERCIAL AT ++0041 ; mapped ; 0061 # 1.1 LATIN CAPITAL LETTER A ++0042 ; mapped ; 0062 # 1.1 LATIN CAPITAL LETTER B ++0043 ; mapped ; 0063 # 1.1 LATIN CAPITAL LETTER C ++0044 ; mapped ; 0064 # 1.1 LATIN CAPITAL LETTER D ++0045 ; mapped ; 0065 # 1.1 LATIN CAPITAL LETTER E ++0046 ; mapped ; 0066 # 1.1 LATIN CAPITAL LETTER F ++0047 ; mapped ; 0067 # 1.1 LATIN CAPITAL LETTER G ++0048 ; mapped ; 0068 # 1.1 LATIN CAPITAL LETTER H ++0049 ; mapped ; 0069 # 1.1 LATIN CAPITAL LETTER I ++004A ; mapped ; 006A # 1.1 LATIN CAPITAL LETTER J ++004B ; mapped ; 006B # 1.1 LATIN CAPITAL LETTER K ++004C ; mapped ; 006C # 1.1 LATIN CAPITAL LETTER L ++004D ; mapped ; 006D # 1.1 LATIN CAPITAL LETTER M ++004E ; mapped ; 006E # 1.1 LATIN CAPITAL LETTER N ++004F ; mapped ; 006F # 1.1 LATIN CAPITAL LETTER O ++0050 ; mapped ; 0070 # 1.1 LATIN CAPITAL LETTER P ++0051 ; mapped ; 0071 # 1.1 LATIN CAPITAL LETTER Q ++0052 ; mapped ; 0072 # 1.1 LATIN CAPITAL LETTER R ++0053 ; mapped ; 0073 # 1.1 LATIN CAPITAL LETTER S ++0054 ; mapped ; 0074 # 1.1 LATIN CAPITAL LETTER T ++0055 ; mapped ; 0075 # 1.1 LATIN CAPITAL LETTER U ++0056 ; mapped ; 0076 # 1.1 LATIN CAPITAL LETTER V ++0057 ; mapped ; 0077 # 1.1 LATIN CAPITAL LETTER W ++0058 ; mapped ; 0078 # 1.1 LATIN CAPITAL LETTER X ++0059 ; mapped ; 0079 # 1.1 LATIN CAPITAL LETTER Y ++005A ; mapped ; 007A # 1.1 LATIN CAPITAL LETTER Z ++005B..0060 ; disallowed_STD3_valid # 1.1 LEFT SQUARE BRACKET..GRAVE ACCENT ++0061..007A ; valid # 1.1 LATIN SMALL LETTER A..LATIN SMALL LETTER Z ++007B..007F ; disallowed_STD3_valid # 1.1 LEFT 
CURLY BRACKET.. ++0080..009F ; disallowed # 1.1 .. ++00A0 ; disallowed_STD3_mapped ; 0020 # 1.1 NO-BREAK SPACE ++00A1..00A7 ; valid ; ; NV8 # 1.1 INVERTED EXCLAMATION MARK..SECTION SIGN ++00A8 ; disallowed_STD3_mapped ; 0020 0308 # 1.1 DIAERESIS ++00A9 ; valid ; ; NV8 # 1.1 COPYRIGHT SIGN ++00AA ; mapped ; 0061 # 1.1 FEMININE ORDINAL INDICATOR ++00AB..00AC ; valid ; ; NV8 # 1.1 LEFT-POINTING DOUBLE ANGLE QUOTATION MARK..NOT SIGN ++00AD ; ignored # 1.1 SOFT HYPHEN ++00AE ; valid ; ; NV8 # 1.1 REGISTERED SIGN ++00AF ; disallowed_STD3_mapped ; 0020 0304 # 1.1 MACRON ++00B0..00B1 ; valid ; ; NV8 # 1.1 DEGREE SIGN..PLUS-MINUS SIGN ++00B2 ; mapped ; 0032 # 1.1 SUPERSCRIPT TWO ++00B3 ; mapped ; 0033 # 1.1 SUPERSCRIPT THREE ++00B4 ; disallowed_STD3_mapped ; 0020 0301 # 1.1 ACUTE ACCENT ++00B5 ; mapped ; 03BC # 1.1 MICRO SIGN ++00B6 ; valid ; ; NV8 # 1.1 PILCROW SIGN ++00B7 ; valid # 1.1 MIDDLE DOT ++00B8 ; disallowed_STD3_mapped ; 0020 0327 # 1.1 CEDILLA ++00B9 ; mapped ; 0031 # 1.1 SUPERSCRIPT ONE ++00BA ; mapped ; 006F # 1.1 MASCULINE ORDINAL INDICATOR ++00BB ; valid ; ; NV8 # 1.1 RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK ++00BC ; mapped ; 0031 2044 0034 #1.1 VULGAR FRACTION ONE QUARTER ++00BD ; mapped ; 0031 2044 0032 #1.1 VULGAR FRACTION ONE HALF ++00BE ; mapped ; 0033 2044 0034 #1.1 VULGAR FRACTION THREE QUARTERS ++00BF ; valid ; ; NV8 # 1.1 INVERTED QUESTION MARK ++00C0 ; mapped ; 00E0 # 1.1 LATIN CAPITAL LETTER A WITH GRAVE ++00C1 ; mapped ; 00E1 # 1.1 LATIN CAPITAL LETTER A WITH ACUTE ++00C2 ; mapped ; 00E2 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX ++00C3 ; mapped ; 00E3 # 1.1 LATIN CAPITAL LETTER A WITH TILDE ++00C4 ; mapped ; 00E4 # 1.1 LATIN CAPITAL LETTER A WITH DIAERESIS ++00C5 ; mapped ; 00E5 # 1.1 LATIN CAPITAL LETTER A WITH RING ABOVE ++00C6 ; mapped ; 00E6 # 1.1 LATIN CAPITAL LETTER AE ++00C7 ; mapped ; 00E7 # 1.1 LATIN CAPITAL LETTER C WITH CEDILLA ++00C8 ; mapped ; 00E8 # 1.1 LATIN CAPITAL LETTER E WITH GRAVE ++00C9 ; mapped ; 00E9 # 1.1 LATIN 
CAPITAL LETTER E WITH ACUTE ++00CA ; mapped ; 00EA # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX ++00CB ; mapped ; 00EB # 1.1 LATIN CAPITAL LETTER E WITH DIAERESIS ++00CC ; mapped ; 00EC # 1.1 LATIN CAPITAL LETTER I WITH GRAVE ++00CD ; mapped ; 00ED # 1.1 LATIN CAPITAL LETTER I WITH ACUTE ++00CE ; mapped ; 00EE # 1.1 LATIN CAPITAL LETTER I WITH CIRCUMFLEX ++00CF ; mapped ; 00EF # 1.1 LATIN CAPITAL LETTER I WITH DIAERESIS ++00D0 ; mapped ; 00F0 # 1.1 LATIN CAPITAL LETTER ETH ++00D1 ; mapped ; 00F1 # 1.1 LATIN CAPITAL LETTER N WITH TILDE ++00D2 ; mapped ; 00F2 # 1.1 LATIN CAPITAL LETTER O WITH GRAVE ++00D3 ; mapped ; 00F3 # 1.1 LATIN CAPITAL LETTER O WITH ACUTE ++00D4 ; mapped ; 00F4 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX ++00D5 ; mapped ; 00F5 # 1.1 LATIN CAPITAL LETTER O WITH TILDE ++00D6 ; mapped ; 00F6 # 1.1 LATIN CAPITAL LETTER O WITH DIAERESIS ++00D7 ; valid ; ; NV8 # 1.1 MULTIPLICATION SIGN ++00D8 ; mapped ; 00F8 # 1.1 LATIN CAPITAL LETTER O WITH STROKE ++00D9 ; mapped ; 00F9 # 1.1 LATIN CAPITAL LETTER U WITH GRAVE ++00DA ; mapped ; 00FA # 1.1 LATIN CAPITAL LETTER U WITH ACUTE ++00DB ; mapped ; 00FB # 1.1 LATIN CAPITAL LETTER U WITH CIRCUMFLEX ++00DC ; mapped ; 00FC # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS ++00DD ; mapped ; 00FD # 1.1 LATIN CAPITAL LETTER Y WITH ACUTE ++00DE ; mapped ; 00FE # 1.1 LATIN CAPITAL LETTER THORN ++00DF ; deviation ; 0073 0073 # 1.1 LATIN SMALL LETTER SHARP S ++00E0..00F6 ; valid # 1.1 LATIN SMALL LETTER A WITH GRAVE..LATIN SMALL LETTER O WITH DIAERESIS ++00F7 ; valid ; ; NV8 # 1.1 DIVISION SIGN ++00F8..00FF ; valid # 1.1 LATIN SMALL LETTER O WITH STROKE..LATIN SMALL LETTER Y WITH DIAERESIS ++0100 ; mapped ; 0101 # 1.1 LATIN CAPITAL LETTER A WITH MACRON ++0101 ; valid # 1.1 LATIN SMALL LETTER A WITH MACRON ++0102 ; mapped ; 0103 # 1.1 LATIN CAPITAL LETTER A WITH BREVE ++0103 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE ++0104 ; mapped ; 0105 # 1.1 LATIN CAPITAL LETTER A WITH OGONEK ++0105 ; valid # 1.1 LATIN SMALL LETTER A 
WITH OGONEK ++0106 ; mapped ; 0107 # 1.1 LATIN CAPITAL LETTER C WITH ACUTE ++0107 ; valid # 1.1 LATIN SMALL LETTER C WITH ACUTE ++0108 ; mapped ; 0109 # 1.1 LATIN CAPITAL LETTER C WITH CIRCUMFLEX ++0109 ; valid # 1.1 LATIN SMALL LETTER C WITH CIRCUMFLEX ++010A ; mapped ; 010B # 1.1 LATIN CAPITAL LETTER C WITH DOT ABOVE ++010B ; valid # 1.1 LATIN SMALL LETTER C WITH DOT ABOVE ++010C ; mapped ; 010D # 1.1 LATIN CAPITAL LETTER C WITH CARON ++010D ; valid # 1.1 LATIN SMALL LETTER C WITH CARON ++010E ; mapped ; 010F # 1.1 LATIN CAPITAL LETTER D WITH CARON ++010F ; valid # 1.1 LATIN SMALL LETTER D WITH CARON ++0110 ; mapped ; 0111 # 1.1 LATIN CAPITAL LETTER D WITH STROKE ++0111 ; valid # 1.1 LATIN SMALL LETTER D WITH STROKE ++0112 ; mapped ; 0113 # 1.1 LATIN CAPITAL LETTER E WITH MACRON ++0113 ; valid # 1.1 LATIN SMALL LETTER E WITH MACRON ++0114 ; mapped ; 0115 # 1.1 LATIN CAPITAL LETTER E WITH BREVE ++0115 ; valid # 1.1 LATIN SMALL LETTER E WITH BREVE ++0116 ; mapped ; 0117 # 1.1 LATIN CAPITAL LETTER E WITH DOT ABOVE ++0117 ; valid # 1.1 LATIN SMALL LETTER E WITH DOT ABOVE ++0118 ; mapped ; 0119 # 1.1 LATIN CAPITAL LETTER E WITH OGONEK ++0119 ; valid # 1.1 LATIN SMALL LETTER E WITH OGONEK ++011A ; mapped ; 011B # 1.1 LATIN CAPITAL LETTER E WITH CARON ++011B ; valid # 1.1 LATIN SMALL LETTER E WITH CARON ++011C ; mapped ; 011D # 1.1 LATIN CAPITAL LETTER G WITH CIRCUMFLEX ++011D ; valid # 1.1 LATIN SMALL LETTER G WITH CIRCUMFLEX ++011E ; mapped ; 011F # 1.1 LATIN CAPITAL LETTER G WITH BREVE ++011F ; valid # 1.1 LATIN SMALL LETTER G WITH BREVE ++0120 ; mapped ; 0121 # 1.1 LATIN CAPITAL LETTER G WITH DOT ABOVE ++0121 ; valid # 1.1 LATIN SMALL LETTER G WITH DOT ABOVE ++0122 ; mapped ; 0123 # 1.1 LATIN CAPITAL LETTER G WITH CEDILLA ++0123 ; valid # 1.1 LATIN SMALL LETTER G WITH CEDILLA ++0124 ; mapped ; 0125 # 1.1 LATIN CAPITAL LETTER H WITH CIRCUMFLEX ++0125 ; valid # 1.1 LATIN SMALL LETTER H WITH CIRCUMFLEX ++0126 ; mapped ; 0127 # 1.1 LATIN CAPITAL LETTER H WITH STROKE 
++0127 ; valid # 1.1 LATIN SMALL LETTER H WITH STROKE ++0128 ; mapped ; 0129 # 1.1 LATIN CAPITAL LETTER I WITH TILDE ++0129 ; valid # 1.1 LATIN SMALL LETTER I WITH TILDE ++012A ; mapped ; 012B # 1.1 LATIN CAPITAL LETTER I WITH MACRON ++012B ; valid # 1.1 LATIN SMALL LETTER I WITH MACRON ++012C ; mapped ; 012D # 1.1 LATIN CAPITAL LETTER I WITH BREVE ++012D ; valid # 1.1 LATIN SMALL LETTER I WITH BREVE ++012E ; mapped ; 012F # 1.1 LATIN CAPITAL LETTER I WITH OGONEK ++012F ; valid # 1.1 LATIN SMALL LETTER I WITH OGONEK ++0130 ; mapped ; 0069 0307 # 1.1 LATIN CAPITAL LETTER I WITH DOT ABOVE ++0131 ; valid # 1.1 LATIN SMALL LETTER DOTLESS I ++0132..0133 ; mapped ; 0069 006A # 1.1 LATIN CAPITAL LIGATURE IJ..LATIN SMALL LIGATURE IJ ++0134 ; mapped ; 0135 # 1.1 LATIN CAPITAL LETTER J WITH CIRCUMFLEX ++0135 ; valid # 1.1 LATIN SMALL LETTER J WITH CIRCUMFLEX ++0136 ; mapped ; 0137 # 1.1 LATIN CAPITAL LETTER K WITH CEDILLA ++0137..0138 ; valid # 1.1 LATIN SMALL LETTER K WITH CEDILLA..LATIN SMALL LETTER KRA ++0139 ; mapped ; 013A # 1.1 LATIN CAPITAL LETTER L WITH ACUTE ++013A ; valid # 1.1 LATIN SMALL LETTER L WITH ACUTE ++013B ; mapped ; 013C # 1.1 LATIN CAPITAL LETTER L WITH CEDILLA ++013C ; valid # 1.1 LATIN SMALL LETTER L WITH CEDILLA ++013D ; mapped ; 013E # 1.1 LATIN CAPITAL LETTER L WITH CARON ++013E ; valid # 1.1 LATIN SMALL LETTER L WITH CARON ++013F..0140 ; mapped ; 006C 00B7 # 1.1 LATIN CAPITAL LETTER L WITH MIDDLE DOT..LATIN SMALL LETTER L WITH MIDDLE DOT ++0141 ; mapped ; 0142 # 1.1 LATIN CAPITAL LETTER L WITH STROKE ++0142 ; valid # 1.1 LATIN SMALL LETTER L WITH STROKE ++0143 ; mapped ; 0144 # 1.1 LATIN CAPITAL LETTER N WITH ACUTE ++0144 ; valid # 1.1 LATIN SMALL LETTER N WITH ACUTE ++0145 ; mapped ; 0146 # 1.1 LATIN CAPITAL LETTER N WITH CEDILLA ++0146 ; valid # 1.1 LATIN SMALL LETTER N WITH CEDILLA ++0147 ; mapped ; 0148 # 1.1 LATIN CAPITAL LETTER N WITH CARON ++0148 ; valid # 1.1 LATIN SMALL LETTER N WITH CARON ++0149 ; mapped ; 02BC 006E # 1.1 LATIN SMALL 
LETTER N PRECEDED BY APOSTROPHE ++014A ; mapped ; 014B # 1.1 LATIN CAPITAL LETTER ENG ++014B ; valid # 1.1 LATIN SMALL LETTER ENG ++014C ; mapped ; 014D # 1.1 LATIN CAPITAL LETTER O WITH MACRON ++014D ; valid # 1.1 LATIN SMALL LETTER O WITH MACRON ++014E ; mapped ; 014F # 1.1 LATIN CAPITAL LETTER O WITH BREVE ++014F ; valid # 1.1 LATIN SMALL LETTER O WITH BREVE ++0150 ; mapped ; 0151 # 1.1 LATIN CAPITAL LETTER O WITH DOUBLE ACUTE ++0151 ; valid # 1.1 LATIN SMALL LETTER O WITH DOUBLE ACUTE ++0152 ; mapped ; 0153 # 1.1 LATIN CAPITAL LIGATURE OE ++0153 ; valid # 1.1 LATIN SMALL LIGATURE OE ++0154 ; mapped ; 0155 # 1.1 LATIN CAPITAL LETTER R WITH ACUTE ++0155 ; valid # 1.1 LATIN SMALL LETTER R WITH ACUTE ++0156 ; mapped ; 0157 # 1.1 LATIN CAPITAL LETTER R WITH CEDILLA ++0157 ; valid # 1.1 LATIN SMALL LETTER R WITH CEDILLA ++0158 ; mapped ; 0159 # 1.1 LATIN CAPITAL LETTER R WITH CARON ++0159 ; valid # 1.1 LATIN SMALL LETTER R WITH CARON ++015A ; mapped ; 015B # 1.1 LATIN CAPITAL LETTER S WITH ACUTE ++015B ; valid # 1.1 LATIN SMALL LETTER S WITH ACUTE ++015C ; mapped ; 015D # 1.1 LATIN CAPITAL LETTER S WITH CIRCUMFLEX ++015D ; valid # 1.1 LATIN SMALL LETTER S WITH CIRCUMFLEX ++015E ; mapped ; 015F # 1.1 LATIN CAPITAL LETTER S WITH CEDILLA ++015F ; valid # 1.1 LATIN SMALL LETTER S WITH CEDILLA ++0160 ; mapped ; 0161 # 1.1 LATIN CAPITAL LETTER S WITH CARON ++0161 ; valid # 1.1 LATIN SMALL LETTER S WITH CARON ++0162 ; mapped ; 0163 # 1.1 LATIN CAPITAL LETTER T WITH CEDILLA ++0163 ; valid # 1.1 LATIN SMALL LETTER T WITH CEDILLA ++0164 ; mapped ; 0165 # 1.1 LATIN CAPITAL LETTER T WITH CARON ++0165 ; valid # 1.1 LATIN SMALL LETTER T WITH CARON ++0166 ; mapped ; 0167 # 1.1 LATIN CAPITAL LETTER T WITH STROKE ++0167 ; valid # 1.1 LATIN SMALL LETTER T WITH STROKE ++0168 ; mapped ; 0169 # 1.1 LATIN CAPITAL LETTER U WITH TILDE ++0169 ; valid # 1.1 LATIN SMALL LETTER U WITH TILDE ++016A ; mapped ; 016B # 1.1 LATIN CAPITAL LETTER U WITH MACRON ++016B ; valid # 1.1 LATIN SMALL LETTER U 
WITH MACRON ++016C ; mapped ; 016D # 1.1 LATIN CAPITAL LETTER U WITH BREVE ++016D ; valid # 1.1 LATIN SMALL LETTER U WITH BREVE ++016E ; mapped ; 016F # 1.1 LATIN CAPITAL LETTER U WITH RING ABOVE ++016F ; valid # 1.1 LATIN SMALL LETTER U WITH RING ABOVE ++0170 ; mapped ; 0171 # 1.1 LATIN CAPITAL LETTER U WITH DOUBLE ACUTE ++0171 ; valid # 1.1 LATIN SMALL LETTER U WITH DOUBLE ACUTE ++0172 ; mapped ; 0173 # 1.1 LATIN CAPITAL LETTER U WITH OGONEK ++0173 ; valid # 1.1 LATIN SMALL LETTER U WITH OGONEK ++0174 ; mapped ; 0175 # 1.1 LATIN CAPITAL LETTER W WITH CIRCUMFLEX ++0175 ; valid # 1.1 LATIN SMALL LETTER W WITH CIRCUMFLEX ++0176 ; mapped ; 0177 # 1.1 LATIN CAPITAL LETTER Y WITH CIRCUMFLEX ++0177 ; valid # 1.1 LATIN SMALL LETTER Y WITH CIRCUMFLEX ++0178 ; mapped ; 00FF # 1.1 LATIN CAPITAL LETTER Y WITH DIAERESIS ++0179 ; mapped ; 017A # 1.1 LATIN CAPITAL LETTER Z WITH ACUTE ++017A ; valid # 1.1 LATIN SMALL LETTER Z WITH ACUTE ++017B ; mapped ; 017C # 1.1 LATIN CAPITAL LETTER Z WITH DOT ABOVE ++017C ; valid # 1.1 LATIN SMALL LETTER Z WITH DOT ABOVE ++017D ; mapped ; 017E # 1.1 LATIN CAPITAL LETTER Z WITH CARON ++017E ; valid # 1.1 LATIN SMALL LETTER Z WITH CARON ++017F ; mapped ; 0073 # 1.1 LATIN SMALL LETTER LONG S ++0180 ; valid # 1.1 LATIN SMALL LETTER B WITH STROKE ++0181 ; mapped ; 0253 # 1.1 LATIN CAPITAL LETTER B WITH HOOK ++0182 ; mapped ; 0183 # 1.1 LATIN CAPITAL LETTER B WITH TOPBAR ++0183 ; valid # 1.1 LATIN SMALL LETTER B WITH TOPBAR ++0184 ; mapped ; 0185 # 1.1 LATIN CAPITAL LETTER TONE SIX ++0185 ; valid # 1.1 LATIN SMALL LETTER TONE SIX ++0186 ; mapped ; 0254 # 1.1 LATIN CAPITAL LETTER OPEN O ++0187 ; mapped ; 0188 # 1.1 LATIN CAPITAL LETTER C WITH HOOK ++0188 ; valid # 1.1 LATIN SMALL LETTER C WITH HOOK ++0189 ; mapped ; 0256 # 1.1 LATIN CAPITAL LETTER AFRICAN D ++018A ; mapped ; 0257 # 1.1 LATIN CAPITAL LETTER D WITH HOOK ++018B ; mapped ; 018C # 1.1 LATIN CAPITAL LETTER D WITH TOPBAR ++018C..018D ; valid # 1.1 LATIN SMALL LETTER D WITH TOPBAR..LATIN 
SMALL LETTER TURNED DELTA ++018E ; mapped ; 01DD # 1.1 LATIN CAPITAL LETTER REVERSED E ++018F ; mapped ; 0259 # 1.1 LATIN CAPITAL LETTER SCHWA ++0190 ; mapped ; 025B # 1.1 LATIN CAPITAL LETTER OPEN E ++0191 ; mapped ; 0192 # 1.1 LATIN CAPITAL LETTER F WITH HOOK ++0192 ; valid # 1.1 LATIN SMALL LETTER F WITH HOOK ++0193 ; mapped ; 0260 # 1.1 LATIN CAPITAL LETTER G WITH HOOK ++0194 ; mapped ; 0263 # 1.1 LATIN CAPITAL LETTER GAMMA ++0195 ; valid # 1.1 LATIN SMALL LETTER HV ++0196 ; mapped ; 0269 # 1.1 LATIN CAPITAL LETTER IOTA ++0197 ; mapped ; 0268 # 1.1 LATIN CAPITAL LETTER I WITH STROKE ++0198 ; mapped ; 0199 # 1.1 LATIN CAPITAL LETTER K WITH HOOK ++0199..019B ; valid # 1.1 LATIN SMALL LETTER K WITH HOOK..LATIN SMALL LETTER LAMBDA WITH STROKE ++019C ; mapped ; 026F # 1.1 LATIN CAPITAL LETTER TURNED M ++019D ; mapped ; 0272 # 1.1 LATIN CAPITAL LETTER N WITH LEFT HOOK ++019E ; valid # 1.1 LATIN SMALL LETTER N WITH LONG RIGHT LEG ++019F ; mapped ; 0275 # 1.1 LATIN CAPITAL LETTER O WITH MIDDLE TILDE ++01A0 ; mapped ; 01A1 # 1.1 LATIN CAPITAL LETTER O WITH HORN ++01A1 ; valid # 1.1 LATIN SMALL LETTER O WITH HORN ++01A2 ; mapped ; 01A3 # 1.1 LATIN CAPITAL LETTER OI ++01A3 ; valid # 1.1 LATIN SMALL LETTER OI ++01A4 ; mapped ; 01A5 # 1.1 LATIN CAPITAL LETTER P WITH HOOK ++01A5 ; valid # 1.1 LATIN SMALL LETTER P WITH HOOK ++01A6 ; mapped ; 0280 # 1.1 LATIN LETTER YR ++01A7 ; mapped ; 01A8 # 1.1 LATIN CAPITAL LETTER TONE TWO ++01A8 ; valid # 1.1 LATIN SMALL LETTER TONE TWO ++01A9 ; mapped ; 0283 # 1.1 LATIN CAPITAL LETTER ESH ++01AA..01AB ; valid # 1.1 LATIN LETTER REVERSED ESH LOOP..LATIN SMALL LETTER T WITH PALATAL HOOK ++01AC ; mapped ; 01AD # 1.1 LATIN CAPITAL LETTER T WITH HOOK ++01AD ; valid # 1.1 LATIN SMALL LETTER T WITH HOOK ++01AE ; mapped ; 0288 # 1.1 LATIN CAPITAL LETTER T WITH RETROFLEX HOOK ++01AF ; mapped ; 01B0 # 1.1 LATIN CAPITAL LETTER U WITH HORN ++01B0 ; valid # 1.1 LATIN SMALL LETTER U WITH HORN ++01B1 ; mapped ; 028A # 1.1 LATIN CAPITAL LETTER UPSILON 
++01B2 ; mapped ; 028B # 1.1 LATIN CAPITAL LETTER V WITH HOOK ++01B3 ; mapped ; 01B4 # 1.1 LATIN CAPITAL LETTER Y WITH HOOK ++01B4 ; valid # 1.1 LATIN SMALL LETTER Y WITH HOOK ++01B5 ; mapped ; 01B6 # 1.1 LATIN CAPITAL LETTER Z WITH STROKE ++01B6 ; valid # 1.1 LATIN SMALL LETTER Z WITH STROKE ++01B7 ; mapped ; 0292 # 1.1 LATIN CAPITAL LETTER EZH ++01B8 ; mapped ; 01B9 # 1.1 LATIN CAPITAL LETTER EZH REVERSED ++01B9..01BB ; valid # 1.1 LATIN SMALL LETTER EZH REVERSED..LATIN LETTER TWO WITH STROKE ++01BC ; mapped ; 01BD # 1.1 LATIN CAPITAL LETTER TONE FIVE ++01BD..01C3 ; valid # 1.1 LATIN SMALL LETTER TONE FIVE..LATIN LETTER RETROFLEX CLICK ++01C4..01C6 ; mapped ; 0064 017E # 1.1 LATIN CAPITAL LETTER DZ WITH CARON..LATIN SMALL LETTER DZ WITH CARON ++01C7..01C9 ; mapped ; 006C 006A # 1.1 LATIN CAPITAL LETTER LJ..LATIN SMALL LETTER LJ ++01CA..01CC ; mapped ; 006E 006A # 1.1 LATIN CAPITAL LETTER NJ..LATIN SMALL LETTER NJ ++01CD ; mapped ; 01CE # 1.1 LATIN CAPITAL LETTER A WITH CARON ++01CE ; valid # 1.1 LATIN SMALL LETTER A WITH CARON ++01CF ; mapped ; 01D0 # 1.1 LATIN CAPITAL LETTER I WITH CARON ++01D0 ; valid # 1.1 LATIN SMALL LETTER I WITH CARON ++01D1 ; mapped ; 01D2 # 1.1 LATIN CAPITAL LETTER O WITH CARON ++01D2 ; valid # 1.1 LATIN SMALL LETTER O WITH CARON ++01D3 ; mapped ; 01D4 # 1.1 LATIN CAPITAL LETTER U WITH CARON ++01D4 ; valid # 1.1 LATIN SMALL LETTER U WITH CARON ++01D5 ; mapped ; 01D6 # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND MACRON ++01D6 ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND MACRON ++01D7 ; mapped ; 01D8 # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND ACUTE ++01D8 ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND ACUTE ++01D9 ; mapped ; 01DA # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND CARON ++01DA ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND CARON ++01DB ; mapped ; 01DC # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS AND GRAVE ++01DC..01DD ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS AND GRAVE..LATIN SMALL LETTER 
TURNED E ++01DE ; mapped ; 01DF # 1.1 LATIN CAPITAL LETTER A WITH DIAERESIS AND MACRON ++01DF ; valid # 1.1 LATIN SMALL LETTER A WITH DIAERESIS AND MACRON ++01E0 ; mapped ; 01E1 # 1.1 LATIN CAPITAL LETTER A WITH DOT ABOVE AND MACRON ++01E1 ; valid # 1.1 LATIN SMALL LETTER A WITH DOT ABOVE AND MACRON ++01E2 ; mapped ; 01E3 # 1.1 LATIN CAPITAL LETTER AE WITH MACRON ++01E3 ; valid # 1.1 LATIN SMALL LETTER AE WITH MACRON ++01E4 ; mapped ; 01E5 # 1.1 LATIN CAPITAL LETTER G WITH STROKE ++01E5 ; valid # 1.1 LATIN SMALL LETTER G WITH STROKE ++01E6 ; mapped ; 01E7 # 1.1 LATIN CAPITAL LETTER G WITH CARON ++01E7 ; valid # 1.1 LATIN SMALL LETTER G WITH CARON ++01E8 ; mapped ; 01E9 # 1.1 LATIN CAPITAL LETTER K WITH CARON ++01E9 ; valid # 1.1 LATIN SMALL LETTER K WITH CARON ++01EA ; mapped ; 01EB # 1.1 LATIN CAPITAL LETTER O WITH OGONEK ++01EB ; valid # 1.1 LATIN SMALL LETTER O WITH OGONEK ++01EC ; mapped ; 01ED # 1.1 LATIN CAPITAL LETTER O WITH OGONEK AND MACRON ++01ED ; valid # 1.1 LATIN SMALL LETTER O WITH OGONEK AND MACRON ++01EE ; mapped ; 01EF # 1.1 LATIN CAPITAL LETTER EZH WITH CARON ++01EF..01F0 ; valid # 1.1 LATIN SMALL LETTER EZH WITH CARON..LATIN SMALL LETTER J WITH CARON ++01F1..01F3 ; mapped ; 0064 007A # 1.1 LATIN CAPITAL LETTER DZ..LATIN SMALL LETTER DZ ++01F4 ; mapped ; 01F5 # 1.1 LATIN CAPITAL LETTER G WITH ACUTE ++01F5 ; valid # 1.1 LATIN SMALL LETTER G WITH ACUTE ++01F6 ; mapped ; 0195 # 3.0 LATIN CAPITAL LETTER HWAIR ++01F7 ; mapped ; 01BF # 3.0 LATIN CAPITAL LETTER WYNN ++01F8 ; mapped ; 01F9 # 3.0 LATIN CAPITAL LETTER N WITH GRAVE ++01F9 ; valid # 3.0 LATIN SMALL LETTER N WITH GRAVE ++01FA ; mapped ; 01FB # 1.1 LATIN CAPITAL LETTER A WITH RING ABOVE AND ACUTE ++01FB ; valid # 1.1 LATIN SMALL LETTER A WITH RING ABOVE AND ACUTE ++01FC ; mapped ; 01FD # 1.1 LATIN CAPITAL LETTER AE WITH ACUTE ++01FD ; valid # 1.1 LATIN SMALL LETTER AE WITH ACUTE ++01FE ; mapped ; 01FF # 1.1 LATIN CAPITAL LETTER O WITH STROKE AND ACUTE ++01FF ; valid # 1.1 LATIN SMALL LETTER O 
WITH STROKE AND ACUTE ++0200 ; mapped ; 0201 # 1.1 LATIN CAPITAL LETTER A WITH DOUBLE GRAVE ++0201 ; valid # 1.1 LATIN SMALL LETTER A WITH DOUBLE GRAVE ++0202 ; mapped ; 0203 # 1.1 LATIN CAPITAL LETTER A WITH INVERTED BREVE ++0203 ; valid # 1.1 LATIN SMALL LETTER A WITH INVERTED BREVE ++0204 ; mapped ; 0205 # 1.1 LATIN CAPITAL LETTER E WITH DOUBLE GRAVE ++0205 ; valid # 1.1 LATIN SMALL LETTER E WITH DOUBLE GRAVE ++0206 ; mapped ; 0207 # 1.1 LATIN CAPITAL LETTER E WITH INVERTED BREVE ++0207 ; valid # 1.1 LATIN SMALL LETTER E WITH INVERTED BREVE ++0208 ; mapped ; 0209 # 1.1 LATIN CAPITAL LETTER I WITH DOUBLE GRAVE ++0209 ; valid # 1.1 LATIN SMALL LETTER I WITH DOUBLE GRAVE ++020A ; mapped ; 020B # 1.1 LATIN CAPITAL LETTER I WITH INVERTED BREVE ++020B ; valid # 1.1 LATIN SMALL LETTER I WITH INVERTED BREVE ++020C ; mapped ; 020D # 1.1 LATIN CAPITAL LETTER O WITH DOUBLE GRAVE ++020D ; valid # 1.1 LATIN SMALL LETTER O WITH DOUBLE GRAVE ++020E ; mapped ; 020F # 1.1 LATIN CAPITAL LETTER O WITH INVERTED BREVE ++020F ; valid # 1.1 LATIN SMALL LETTER O WITH INVERTED BREVE ++0210 ; mapped ; 0211 # 1.1 LATIN CAPITAL LETTER R WITH DOUBLE GRAVE ++0211 ; valid # 1.1 LATIN SMALL LETTER R WITH DOUBLE GRAVE ++0212 ; mapped ; 0213 # 1.1 LATIN CAPITAL LETTER R WITH INVERTED BREVE ++0213 ; valid # 1.1 LATIN SMALL LETTER R WITH INVERTED BREVE ++0214 ; mapped ; 0215 # 1.1 LATIN CAPITAL LETTER U WITH DOUBLE GRAVE ++0215 ; valid # 1.1 LATIN SMALL LETTER U WITH DOUBLE GRAVE ++0216 ; mapped ; 0217 # 1.1 LATIN CAPITAL LETTER U WITH INVERTED BREVE ++0217 ; valid # 1.1 LATIN SMALL LETTER U WITH INVERTED BREVE ++0218 ; mapped ; 0219 # 3.0 LATIN CAPITAL LETTER S WITH COMMA BELOW ++0219 ; valid # 3.0 LATIN SMALL LETTER S WITH COMMA BELOW ++021A ; mapped ; 021B # 3.0 LATIN CAPITAL LETTER T WITH COMMA BELOW ++021B ; valid # 3.0 LATIN SMALL LETTER T WITH COMMA BELOW ++021C ; mapped ; 021D # 3.0 LATIN CAPITAL LETTER YOGH ++021D ; valid # 3.0 LATIN SMALL LETTER YOGH ++021E ; mapped ; 021F # 3.0 LATIN 
CAPITAL LETTER H WITH CARON ++021F ; valid # 3.0 LATIN SMALL LETTER H WITH CARON ++0220 ; mapped ; 019E # 3.2 LATIN CAPITAL LETTER N WITH LONG RIGHT LEG ++0221 ; valid # 4.0 LATIN SMALL LETTER D WITH CURL ++0222 ; mapped ; 0223 # 3.0 LATIN CAPITAL LETTER OU ++0223 ; valid # 3.0 LATIN SMALL LETTER OU ++0224 ; mapped ; 0225 # 3.0 LATIN CAPITAL LETTER Z WITH HOOK ++0225 ; valid # 3.0 LATIN SMALL LETTER Z WITH HOOK ++0226 ; mapped ; 0227 # 3.0 LATIN CAPITAL LETTER A WITH DOT ABOVE ++0227 ; valid # 3.0 LATIN SMALL LETTER A WITH DOT ABOVE ++0228 ; mapped ; 0229 # 3.0 LATIN CAPITAL LETTER E WITH CEDILLA ++0229 ; valid # 3.0 LATIN SMALL LETTER E WITH CEDILLA ++022A ; mapped ; 022B # 3.0 LATIN CAPITAL LETTER O WITH DIAERESIS AND MACRON ++022B ; valid # 3.0 LATIN SMALL LETTER O WITH DIAERESIS AND MACRON ++022C ; mapped ; 022D # 3.0 LATIN CAPITAL LETTER O WITH TILDE AND MACRON ++022D ; valid # 3.0 LATIN SMALL LETTER O WITH TILDE AND MACRON ++022E ; mapped ; 022F # 3.0 LATIN CAPITAL LETTER O WITH DOT ABOVE ++022F ; valid # 3.0 LATIN SMALL LETTER O WITH DOT ABOVE ++0230 ; mapped ; 0231 # 3.0 LATIN CAPITAL LETTER O WITH DOT ABOVE AND MACRON ++0231 ; valid # 3.0 LATIN SMALL LETTER O WITH DOT ABOVE AND MACRON ++0232 ; mapped ; 0233 # 3.0 LATIN CAPITAL LETTER Y WITH MACRON ++0233 ; valid # 3.0 LATIN SMALL LETTER Y WITH MACRON ++0234..0236 ; valid # 4.0 LATIN SMALL LETTER L WITH CURL..LATIN SMALL LETTER T WITH CURL ++0237..0239 ; valid # 4.1 LATIN SMALL LETTER DOTLESS J..LATIN SMALL LETTER QP DIGRAPH ++023A ; mapped ; 2C65 # 4.1 LATIN CAPITAL LETTER A WITH STROKE ++023B ; mapped ; 023C # 4.1 LATIN CAPITAL LETTER C WITH STROKE ++023C ; valid # 4.1 LATIN SMALL LETTER C WITH STROKE ++023D ; mapped ; 019A # 4.1 LATIN CAPITAL LETTER L WITH BAR ++023E ; mapped ; 2C66 # 4.1 LATIN CAPITAL LETTER T WITH DIAGONAL STROKE ++023F..0240 ; valid # 4.1 LATIN SMALL LETTER S WITH SWASH TAIL..LATIN SMALL LETTER Z WITH SWASH TAIL ++0241 ; mapped ; 0242 # 4.1 LATIN CAPITAL LETTER GLOTTAL STOP ++0242 ; 
valid # 5.0 LATIN SMALL LETTER GLOTTAL STOP ++0243 ; mapped ; 0180 # 5.0 LATIN CAPITAL LETTER B WITH STROKE ++0244 ; mapped ; 0289 # 5.0 LATIN CAPITAL LETTER U BAR ++0245 ; mapped ; 028C # 5.0 LATIN CAPITAL LETTER TURNED V ++0246 ; mapped ; 0247 # 5.0 LATIN CAPITAL LETTER E WITH STROKE ++0247 ; valid # 5.0 LATIN SMALL LETTER E WITH STROKE ++0248 ; mapped ; 0249 # 5.0 LATIN CAPITAL LETTER J WITH STROKE ++0249 ; valid # 5.0 LATIN SMALL LETTER J WITH STROKE ++024A ; mapped ; 024B # 5.0 LATIN CAPITAL LETTER SMALL Q WITH HOOK TAIL ++024B ; valid # 5.0 LATIN SMALL LETTER Q WITH HOOK TAIL ++024C ; mapped ; 024D # 5.0 LATIN CAPITAL LETTER R WITH STROKE ++024D ; valid # 5.0 LATIN SMALL LETTER R WITH STROKE ++024E ; mapped ; 024F # 5.0 LATIN CAPITAL LETTER Y WITH STROKE ++024F ; valid # 5.0 LATIN SMALL LETTER Y WITH STROKE ++0250..02A8 ; valid # 1.1 LATIN SMALL LETTER TURNED A..LATIN SMALL LETTER TC DIGRAPH WITH CURL ++02A9..02AD ; valid # 3.0 LATIN SMALL LETTER FENG DIGRAPH..LATIN LETTER BIDENTAL PERCUSSIVE ++02AE..02AF ; valid # 4.0 LATIN SMALL LETTER TURNED H WITH FISHHOOK..LATIN SMALL LETTER TURNED H WITH FISHHOOK AND TAIL ++02B0 ; mapped ; 0068 # 1.1 MODIFIER LETTER SMALL H ++02B1 ; mapped ; 0266 # 1.1 MODIFIER LETTER SMALL H WITH HOOK ++02B2 ; mapped ; 006A # 1.1 MODIFIER LETTER SMALL J ++02B3 ; mapped ; 0072 # 1.1 MODIFIER LETTER SMALL R ++02B4 ; mapped ; 0279 # 1.1 MODIFIER LETTER SMALL TURNED R ++02B5 ; mapped ; 027B # 1.1 MODIFIER LETTER SMALL TURNED R WITH HOOK ++02B6 ; mapped ; 0281 # 1.1 MODIFIER LETTER SMALL CAPITAL INVERTED R ++02B7 ; mapped ; 0077 # 1.1 MODIFIER LETTER SMALL W ++02B8 ; mapped ; 0079 # 1.1 MODIFIER LETTER SMALL Y ++02B9..02C1 ; valid # 1.1 MODIFIER LETTER PRIME..MODIFIER LETTER REVERSED GLOTTAL STOP ++02C2..02C5 ; valid ; ; NV8 # 1.1 MODIFIER LETTER LEFT ARROWHEAD..MODIFIER LETTER DOWN ARROWHEAD ++02C6..02D1 ; valid # 1.1 MODIFIER LETTER CIRCUMFLEX ACCENT..MODIFIER LETTER HALF TRIANGULAR COLON ++02D2..02D7 ; valid ; ; NV8 # 1.1 MODIFIER LETTER 
CENTRED RIGHT HALF RING..MODIFIER LETTER MINUS SIGN ++02D8 ; disallowed_STD3_mapped ; 0020 0306 # 1.1 BREVE ++02D9 ; disallowed_STD3_mapped ; 0020 0307 # 1.1 DOT ABOVE ++02DA ; disallowed_STD3_mapped ; 0020 030A # 1.1 RING ABOVE ++02DB ; disallowed_STD3_mapped ; 0020 0328 # 1.1 OGONEK ++02DC ; disallowed_STD3_mapped ; 0020 0303 # 1.1 SMALL TILDE ++02DD ; disallowed_STD3_mapped ; 0020 030B # 1.1 DOUBLE ACUTE ACCENT ++02DE ; valid ; ; NV8 # 1.1 MODIFIER LETTER RHOTIC HOOK ++02DF ; valid ; ; NV8 # 3.0 MODIFIER LETTER CROSS ACCENT ++02E0 ; mapped ; 0263 # 1.1 MODIFIER LETTER SMALL GAMMA ++02E1 ; mapped ; 006C # 1.1 MODIFIER LETTER SMALL L ++02E2 ; mapped ; 0073 # 1.1 MODIFIER LETTER SMALL S ++02E3 ; mapped ; 0078 # 1.1 MODIFIER LETTER SMALL X ++02E4 ; mapped ; 0295 # 1.1 MODIFIER LETTER SMALL REVERSED GLOTTAL STOP ++02E5..02E9 ; valid ; ; NV8 # 1.1 MODIFIER LETTER EXTRA-HIGH TONE BAR..MODIFIER LETTER EXTRA-LOW TONE BAR ++02EA..02EB ; valid ; ; NV8 # 3.0 MODIFIER LETTER YIN DEPARTING TONE MARK..MODIFIER LETTER YANG DEPARTING TONE MARK ++02EC ; valid # 3.0 MODIFIER LETTER VOICING ++02ED ; valid ; ; NV8 # 3.0 MODIFIER LETTER UNASPIRATED ++02EE ; valid # 3.0 MODIFIER LETTER DOUBLE APOSTROPHE ++02EF..02FF ; valid ; ; NV8 # 4.0 MODIFIER LETTER LOW DOWN ARROWHEAD..MODIFIER LETTER LOW LEFT ARROW ++0300..033F ; valid # 1.1 COMBINING GRAVE ACCENT..COMBINING DOUBLE OVERLINE ++0340 ; mapped ; 0300 # 1.1 COMBINING GRAVE TONE MARK ++0341 ; mapped ; 0301 # 1.1 COMBINING ACUTE TONE MARK ++0342 ; valid # 1.1 COMBINING GREEK PERISPOMENI ++0343 ; mapped ; 0313 # 1.1 COMBINING GREEK KORONIS ++0344 ; mapped ; 0308 0301 # 1.1 COMBINING GREEK DIALYTIKA TONOS ++0345 ; mapped ; 03B9 # 1.1 COMBINING GREEK YPOGEGRAMMENI ++0346..034E ; valid # 3.0 COMBINING BRIDGE ABOVE..COMBINING UPWARDS ARROW BELOW ++034F ; ignored # 3.2 COMBINING GRAPHEME JOINER ++0350..0357 ; valid # 4.0 COMBINING RIGHT ARROWHEAD ABOVE..COMBINING RIGHT HALF RING ABOVE ++0358..035C ; valid # 4.1 COMBINING DOT ABOVE 
RIGHT..COMBINING DOUBLE BREVE BELOW ++035D..035F ; valid # 4.0 COMBINING DOUBLE BREVE..COMBINING DOUBLE MACRON BELOW ++0360..0361 ; valid # 1.1 COMBINING DOUBLE TILDE..COMBINING DOUBLE INVERTED BREVE ++0362 ; valid # 3.0 COMBINING DOUBLE RIGHTWARDS ARROW BELOW ++0363..036F ; valid # 3.2 COMBINING LATIN SMALL LETTER A..COMBINING LATIN SMALL LETTER X ++0370 ; mapped ; 0371 # 5.1 GREEK CAPITAL LETTER HETA ++0371 ; valid # 5.1 GREEK SMALL LETTER HETA ++0372 ; mapped ; 0373 # 5.1 GREEK CAPITAL LETTER ARCHAIC SAMPI ++0373 ; valid # 5.1 GREEK SMALL LETTER ARCHAIC SAMPI ++0374 ; mapped ; 02B9 # 1.1 GREEK NUMERAL SIGN ++0375 ; valid # 1.1 GREEK LOWER NUMERAL SIGN ++0376 ; mapped ; 0377 # 5.1 GREEK CAPITAL LETTER PAMPHYLIAN DIGAMMA ++0377 ; valid # 5.1 GREEK SMALL LETTER PAMPHYLIAN DIGAMMA ++0378..0379 ; disallowed # NA .. ++037A ; disallowed_STD3_mapped ; 0020 03B9 # 1.1 GREEK YPOGEGRAMMENI ++037B..037D ; valid # 5.0 GREEK SMALL REVERSED LUNATE SIGMA SYMBOL..GREEK SMALL REVERSED DOTTED LUNATE SIGMA SYMBOL ++037E ; disallowed_STD3_mapped ; 003B # 1.1 GREEK QUESTION MARK ++037F ; mapped ; 03F3 # 7.0 GREEK CAPITAL LETTER YOT ++0380..0383 ; disallowed # NA .. 
++0384 ; disallowed_STD3_mapped ; 0020 0301 # 1.1 GREEK TONOS ++0385 ; disallowed_STD3_mapped ; 0020 0308 0301 #1.1 GREEK DIALYTIKA TONOS ++0386 ; mapped ; 03AC # 1.1 GREEK CAPITAL LETTER ALPHA WITH TONOS ++0387 ; mapped ; 00B7 # 1.1 GREEK ANO TELEIA ++0388 ; mapped ; 03AD # 1.1 GREEK CAPITAL LETTER EPSILON WITH TONOS ++0389 ; mapped ; 03AE # 1.1 GREEK CAPITAL LETTER ETA WITH TONOS ++038A ; mapped ; 03AF # 1.1 GREEK CAPITAL LETTER IOTA WITH TONOS ++038B ; disallowed # NA ++038C ; mapped ; 03CC # 1.1 GREEK CAPITAL LETTER OMICRON WITH TONOS ++038D ; disallowed # NA ++038E ; mapped ; 03CD # 1.1 GREEK CAPITAL LETTER UPSILON WITH TONOS ++038F ; mapped ; 03CE # 1.1 GREEK CAPITAL LETTER OMEGA WITH TONOS ++0390 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS ++0391 ; mapped ; 03B1 # 1.1 GREEK CAPITAL LETTER ALPHA ++0392 ; mapped ; 03B2 # 1.1 GREEK CAPITAL LETTER BETA ++0393 ; mapped ; 03B3 # 1.1 GREEK CAPITAL LETTER GAMMA ++0394 ; mapped ; 03B4 # 1.1 GREEK CAPITAL LETTER DELTA ++0395 ; mapped ; 03B5 # 1.1 GREEK CAPITAL LETTER EPSILON ++0396 ; mapped ; 03B6 # 1.1 GREEK CAPITAL LETTER ZETA ++0397 ; mapped ; 03B7 # 1.1 GREEK CAPITAL LETTER ETA ++0398 ; mapped ; 03B8 # 1.1 GREEK CAPITAL LETTER THETA ++0399 ; mapped ; 03B9 # 1.1 GREEK CAPITAL LETTER IOTA ++039A ; mapped ; 03BA # 1.1 GREEK CAPITAL LETTER KAPPA ++039B ; mapped ; 03BB # 1.1 GREEK CAPITAL LETTER LAMDA ++039C ; mapped ; 03BC # 1.1 GREEK CAPITAL LETTER MU ++039D ; mapped ; 03BD # 1.1 GREEK CAPITAL LETTER NU ++039E ; mapped ; 03BE # 1.1 GREEK CAPITAL LETTER XI ++039F ; mapped ; 03BF # 1.1 GREEK CAPITAL LETTER OMICRON ++03A0 ; mapped ; 03C0 # 1.1 GREEK CAPITAL LETTER PI ++03A1 ; mapped ; 03C1 # 1.1 GREEK CAPITAL LETTER RHO ++03A2 ; disallowed # NA ++03A3 ; mapped ; 03C3 # 1.1 GREEK CAPITAL LETTER SIGMA ++03A4 ; mapped ; 03C4 # 1.1 GREEK CAPITAL LETTER TAU ++03A5 ; mapped ; 03C5 # 1.1 GREEK CAPITAL LETTER UPSILON ++03A6 ; mapped ; 03C6 # 1.1 GREEK CAPITAL LETTER PHI ++03A7 ; mapped ; 03C7 # 1.1 GREEK 
CAPITAL LETTER CHI ++03A8 ; mapped ; 03C8 # 1.1 GREEK CAPITAL LETTER PSI ++03A9 ; mapped ; 03C9 # 1.1 GREEK CAPITAL LETTER OMEGA ++03AA ; mapped ; 03CA # 1.1 GREEK CAPITAL LETTER IOTA WITH DIALYTIKA ++03AB ; mapped ; 03CB # 1.1 GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA ++03AC..03C1 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH TONOS..GREEK SMALL LETTER RHO ++03C2 ; deviation ; 03C3 # 1.1 GREEK SMALL LETTER FINAL SIGMA ++03C3..03CE ; valid # 1.1 GREEK SMALL LETTER SIGMA..GREEK SMALL LETTER OMEGA WITH TONOS ++03CF ; mapped ; 03D7 # 5.1 GREEK CAPITAL KAI SYMBOL ++03D0 ; mapped ; 03B2 # 1.1 GREEK BETA SYMBOL ++03D1 ; mapped ; 03B8 # 1.1 GREEK THETA SYMBOL ++03D2 ; mapped ; 03C5 # 1.1 GREEK UPSILON WITH HOOK SYMBOL ++03D3 ; mapped ; 03CD # 1.1 GREEK UPSILON WITH ACUTE AND HOOK SYMBOL ++03D4 ; mapped ; 03CB # 1.1 GREEK UPSILON WITH DIAERESIS AND HOOK SYMBOL ++03D5 ; mapped ; 03C6 # 1.1 GREEK PHI SYMBOL ++03D6 ; mapped ; 03C0 # 1.1 GREEK PI SYMBOL ++03D7 ; valid # 3.0 GREEK KAI SYMBOL ++03D8 ; mapped ; 03D9 # 3.2 GREEK LETTER ARCHAIC KOPPA ++03D9 ; valid # 3.2 GREEK SMALL LETTER ARCHAIC KOPPA ++03DA ; mapped ; 03DB # 1.1 GREEK LETTER STIGMA ++03DB ; valid # 3.0 GREEK SMALL LETTER STIGMA ++03DC ; mapped ; 03DD # 1.1 GREEK LETTER DIGAMMA ++03DD ; valid # 3.0 GREEK SMALL LETTER DIGAMMA ++03DE ; mapped ; 03DF # 1.1 GREEK LETTER KOPPA ++03DF ; valid # 3.0 GREEK SMALL LETTER KOPPA ++03E0 ; mapped ; 03E1 # 1.1 GREEK LETTER SAMPI ++03E1 ; valid # 3.0 GREEK SMALL LETTER SAMPI ++03E2 ; mapped ; 03E3 # 1.1 COPTIC CAPITAL LETTER SHEI ++03E3 ; valid # 1.1 COPTIC SMALL LETTER SHEI ++03E4 ; mapped ; 03E5 # 1.1 COPTIC CAPITAL LETTER FEI ++03E5 ; valid # 1.1 COPTIC SMALL LETTER FEI ++03E6 ; mapped ; 03E7 # 1.1 COPTIC CAPITAL LETTER KHEI ++03E7 ; valid # 1.1 COPTIC SMALL LETTER KHEI ++03E8 ; mapped ; 03E9 # 1.1 COPTIC CAPITAL LETTER HORI ++03E9 ; valid # 1.1 COPTIC SMALL LETTER HORI ++03EA ; mapped ; 03EB # 1.1 COPTIC CAPITAL LETTER GANGIA ++03EB ; valid # 1.1 COPTIC SMALL LETTER GANGIA 
++03EC ; mapped ; 03ED # 1.1 COPTIC CAPITAL LETTER SHIMA ++03ED ; valid # 1.1 COPTIC SMALL LETTER SHIMA ++03EE ; mapped ; 03EF # 1.1 COPTIC CAPITAL LETTER DEI ++03EF ; valid # 1.1 COPTIC SMALL LETTER DEI ++03F0 ; mapped ; 03BA # 1.1 GREEK KAPPA SYMBOL ++03F1 ; mapped ; 03C1 # 1.1 GREEK RHO SYMBOL ++03F2 ; mapped ; 03C3 # 1.1 GREEK LUNATE SIGMA SYMBOL ++03F3 ; valid # 1.1 GREEK LETTER YOT ++03F4 ; mapped ; 03B8 # 3.1 GREEK CAPITAL THETA SYMBOL ++03F5 ; mapped ; 03B5 # 3.1 GREEK LUNATE EPSILON SYMBOL ++03F6 ; valid ; ; NV8 # 3.2 GREEK REVERSED LUNATE EPSILON SYMBOL ++03F7 ; mapped ; 03F8 # 4.0 GREEK CAPITAL LETTER SHO ++03F8 ; valid # 4.0 GREEK SMALL LETTER SHO ++03F9 ; mapped ; 03C3 # 4.0 GREEK CAPITAL LUNATE SIGMA SYMBOL ++03FA ; mapped ; 03FB # 4.0 GREEK CAPITAL LETTER SAN ++03FB ; valid # 4.0 GREEK SMALL LETTER SAN ++03FC ; valid # 4.1 GREEK RHO WITH STROKE SYMBOL ++03FD ; mapped ; 037B # 4.1 GREEK CAPITAL REVERSED LUNATE SIGMA SYMBOL ++03FE ; mapped ; 037C # 4.1 GREEK CAPITAL DOTTED LUNATE SIGMA SYMBOL ++03FF ; mapped ; 037D # 4.1 GREEK CAPITAL REVERSED DOTTED LUNATE SIGMA SYMBOL ++0400 ; mapped ; 0450 # 3.0 CYRILLIC CAPITAL LETTER IE WITH GRAVE ++0401 ; mapped ; 0451 # 1.1 CYRILLIC CAPITAL LETTER IO ++0402 ; mapped ; 0452 # 1.1 CYRILLIC CAPITAL LETTER DJE ++0403 ; mapped ; 0453 # 1.1 CYRILLIC CAPITAL LETTER GJE ++0404 ; mapped ; 0454 # 1.1 CYRILLIC CAPITAL LETTER UKRAINIAN IE ++0405 ; mapped ; 0455 # 1.1 CYRILLIC CAPITAL LETTER DZE ++0406 ; mapped ; 0456 # 1.1 CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I ++0407 ; mapped ; 0457 # 1.1 CYRILLIC CAPITAL LETTER YI ++0408 ; mapped ; 0458 # 1.1 CYRILLIC CAPITAL LETTER JE ++0409 ; mapped ; 0459 # 1.1 CYRILLIC CAPITAL LETTER LJE ++040A ; mapped ; 045A # 1.1 CYRILLIC CAPITAL LETTER NJE ++040B ; mapped ; 045B # 1.1 CYRILLIC CAPITAL LETTER TSHE ++040C ; mapped ; 045C # 1.1 CYRILLIC CAPITAL LETTER KJE ++040D ; mapped ; 045D # 3.0 CYRILLIC CAPITAL LETTER I WITH GRAVE ++040E ; mapped ; 045E # 1.1 CYRILLIC CAPITAL LETTER 
SHORT U ++040F ; mapped ; 045F # 1.1 CYRILLIC CAPITAL LETTER DZHE ++0410 ; mapped ; 0430 # 1.1 CYRILLIC CAPITAL LETTER A ++0411 ; mapped ; 0431 # 1.1 CYRILLIC CAPITAL LETTER BE ++0412 ; mapped ; 0432 # 1.1 CYRILLIC CAPITAL LETTER VE ++0413 ; mapped ; 0433 # 1.1 CYRILLIC CAPITAL LETTER GHE ++0414 ; mapped ; 0434 # 1.1 CYRILLIC CAPITAL LETTER DE ++0415 ; mapped ; 0435 # 1.1 CYRILLIC CAPITAL LETTER IE ++0416 ; mapped ; 0436 # 1.1 CYRILLIC CAPITAL LETTER ZHE ++0417 ; mapped ; 0437 # 1.1 CYRILLIC CAPITAL LETTER ZE ++0418 ; mapped ; 0438 # 1.1 CYRILLIC CAPITAL LETTER I ++0419 ; mapped ; 0439 # 1.1 CYRILLIC CAPITAL LETTER SHORT I ++041A ; mapped ; 043A # 1.1 CYRILLIC CAPITAL LETTER KA ++041B ; mapped ; 043B # 1.1 CYRILLIC CAPITAL LETTER EL ++041C ; mapped ; 043C # 1.1 CYRILLIC CAPITAL LETTER EM ++041D ; mapped ; 043D # 1.1 CYRILLIC CAPITAL LETTER EN ++041E ; mapped ; 043E # 1.1 CYRILLIC CAPITAL LETTER O ++041F ; mapped ; 043F # 1.1 CYRILLIC CAPITAL LETTER PE ++0420 ; mapped ; 0440 # 1.1 CYRILLIC CAPITAL LETTER ER ++0421 ; mapped ; 0441 # 1.1 CYRILLIC CAPITAL LETTER ES ++0422 ; mapped ; 0442 # 1.1 CYRILLIC CAPITAL LETTER TE ++0423 ; mapped ; 0443 # 1.1 CYRILLIC CAPITAL LETTER U ++0424 ; mapped ; 0444 # 1.1 CYRILLIC CAPITAL LETTER EF ++0425 ; mapped ; 0445 # 1.1 CYRILLIC CAPITAL LETTER HA ++0426 ; mapped ; 0446 # 1.1 CYRILLIC CAPITAL LETTER TSE ++0427 ; mapped ; 0447 # 1.1 CYRILLIC CAPITAL LETTER CHE ++0428 ; mapped ; 0448 # 1.1 CYRILLIC CAPITAL LETTER SHA ++0429 ; mapped ; 0449 # 1.1 CYRILLIC CAPITAL LETTER SHCHA ++042A ; mapped ; 044A # 1.1 CYRILLIC CAPITAL LETTER HARD SIGN ++042B ; mapped ; 044B # 1.1 CYRILLIC CAPITAL LETTER YERU ++042C ; mapped ; 044C # 1.1 CYRILLIC CAPITAL LETTER SOFT SIGN ++042D ; mapped ; 044D # 1.1 CYRILLIC CAPITAL LETTER E ++042E ; mapped ; 044E # 1.1 CYRILLIC CAPITAL LETTER YU ++042F ; mapped ; 044F # 1.1 CYRILLIC CAPITAL LETTER YA ++0430..044F ; valid # 1.1 CYRILLIC SMALL LETTER A..CYRILLIC SMALL LETTER YA ++0450 ; valid # 3.0 CYRILLIC SMALL 
LETTER IE WITH GRAVE ++0451..045C ; valid # 1.1 CYRILLIC SMALL LETTER IO..CYRILLIC SMALL LETTER KJE ++045D ; valid # 3.0 CYRILLIC SMALL LETTER I WITH GRAVE ++045E..045F ; valid # 1.1 CYRILLIC SMALL LETTER SHORT U..CYRILLIC SMALL LETTER DZHE ++0460 ; mapped ; 0461 # 1.1 CYRILLIC CAPITAL LETTER OMEGA ++0461 ; valid # 1.1 CYRILLIC SMALL LETTER OMEGA ++0462 ; mapped ; 0463 # 1.1 CYRILLIC CAPITAL LETTER YAT ++0463 ; valid # 1.1 CYRILLIC SMALL LETTER YAT ++0464 ; mapped ; 0465 # 1.1 CYRILLIC CAPITAL LETTER IOTIFIED E ++0465 ; valid # 1.1 CYRILLIC SMALL LETTER IOTIFIED E ++0466 ; mapped ; 0467 # 1.1 CYRILLIC CAPITAL LETTER LITTLE YUS ++0467 ; valid # 1.1 CYRILLIC SMALL LETTER LITTLE YUS ++0468 ; mapped ; 0469 # 1.1 CYRILLIC CAPITAL LETTER IOTIFIED LITTLE YUS ++0469 ; valid # 1.1 CYRILLIC SMALL LETTER IOTIFIED LITTLE YUS ++046A ; mapped ; 046B # 1.1 CYRILLIC CAPITAL LETTER BIG YUS ++046B ; valid # 1.1 CYRILLIC SMALL LETTER BIG YUS ++046C ; mapped ; 046D # 1.1 CYRILLIC CAPITAL LETTER IOTIFIED BIG YUS ++046D ; valid # 1.1 CYRILLIC SMALL LETTER IOTIFIED BIG YUS ++046E ; mapped ; 046F # 1.1 CYRILLIC CAPITAL LETTER KSI ++046F ; valid # 1.1 CYRILLIC SMALL LETTER KSI ++0470 ; mapped ; 0471 # 1.1 CYRILLIC CAPITAL LETTER PSI ++0471 ; valid # 1.1 CYRILLIC SMALL LETTER PSI ++0472 ; mapped ; 0473 # 1.1 CYRILLIC CAPITAL LETTER FITA ++0473 ; valid # 1.1 CYRILLIC SMALL LETTER FITA ++0474 ; mapped ; 0475 # 1.1 CYRILLIC CAPITAL LETTER IZHITSA ++0475 ; valid # 1.1 CYRILLIC SMALL LETTER IZHITSA ++0476 ; mapped ; 0477 # 1.1 CYRILLIC CAPITAL LETTER IZHITSA WITH DOUBLE GRAVE ACCENT ++0477 ; valid # 1.1 CYRILLIC SMALL LETTER IZHITSA WITH DOUBLE GRAVE ACCENT ++0478 ; mapped ; 0479 # 1.1 CYRILLIC CAPITAL LETTER UK ++0479 ; valid # 1.1 CYRILLIC SMALL LETTER UK ++047A ; mapped ; 047B # 1.1 CYRILLIC CAPITAL LETTER ROUND OMEGA ++047B ; valid # 1.1 CYRILLIC SMALL LETTER ROUND OMEGA ++047C ; mapped ; 047D # 1.1 CYRILLIC CAPITAL LETTER OMEGA WITH TITLO ++047D ; valid # 1.1 CYRILLIC SMALL LETTER OMEGA 
WITH TITLO ++047E ; mapped ; 047F # 1.1 CYRILLIC CAPITAL LETTER OT ++047F ; valid # 1.1 CYRILLIC SMALL LETTER OT ++0480 ; mapped ; 0481 # 1.1 CYRILLIC CAPITAL LETTER KOPPA ++0481 ; valid # 1.1 CYRILLIC SMALL LETTER KOPPA ++0482 ; valid ; ; NV8 # 1.1 CYRILLIC THOUSANDS SIGN ++0483..0486 ; valid # 1.1 COMBINING CYRILLIC TITLO..COMBINING CYRILLIC PSILI PNEUMATA ++0487 ; valid # 5.1 COMBINING CYRILLIC POKRYTIE ++0488..0489 ; valid ; ; NV8 # 3.0 COMBINING CYRILLIC HUNDRED THOUSANDS SIGN..COMBINING CYRILLIC MILLIONS SIGN ++048A ; mapped ; 048B # 3.2 CYRILLIC CAPITAL LETTER SHORT I WITH TAIL ++048B ; valid # 3.2 CYRILLIC SMALL LETTER SHORT I WITH TAIL ++048C ; mapped ; 048D # 3.0 CYRILLIC CAPITAL LETTER SEMISOFT SIGN ++048D ; valid # 3.0 CYRILLIC SMALL LETTER SEMISOFT SIGN ++048E ; mapped ; 048F # 3.0 CYRILLIC CAPITAL LETTER ER WITH TICK ++048F ; valid # 3.0 CYRILLIC SMALL LETTER ER WITH TICK ++0490 ; mapped ; 0491 # 1.1 CYRILLIC CAPITAL LETTER GHE WITH UPTURN ++0491 ; valid # 1.1 CYRILLIC SMALL LETTER GHE WITH UPTURN ++0492 ; mapped ; 0493 # 1.1 CYRILLIC CAPITAL LETTER GHE WITH STROKE ++0493 ; valid # 1.1 CYRILLIC SMALL LETTER GHE WITH STROKE ++0494 ; mapped ; 0495 # 1.1 CYRILLIC CAPITAL LETTER GHE WITH MIDDLE HOOK ++0495 ; valid # 1.1 CYRILLIC SMALL LETTER GHE WITH MIDDLE HOOK ++0496 ; mapped ; 0497 # 1.1 CYRILLIC CAPITAL LETTER ZHE WITH DESCENDER ++0497 ; valid # 1.1 CYRILLIC SMALL LETTER ZHE WITH DESCENDER ++0498 ; mapped ; 0499 # 1.1 CYRILLIC CAPITAL LETTER ZE WITH DESCENDER ++0499 ; valid # 1.1 CYRILLIC SMALL LETTER ZE WITH DESCENDER ++049A ; mapped ; 049B # 1.1 CYRILLIC CAPITAL LETTER KA WITH DESCENDER ++049B ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH DESCENDER ++049C ; mapped ; 049D # 1.1 CYRILLIC CAPITAL LETTER KA WITH VERTICAL STROKE ++049D ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH VERTICAL STROKE ++049E ; mapped ; 049F # 1.1 CYRILLIC CAPITAL LETTER KA WITH STROKE ++049F ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH STROKE ++04A0 ; mapped ; 04A1 # 1.1 CYRILLIC 
CAPITAL LETTER BASHKIR KA ++04A1 ; valid # 1.1 CYRILLIC SMALL LETTER BASHKIR KA ++04A2 ; mapped ; 04A3 # 1.1 CYRILLIC CAPITAL LETTER EN WITH DESCENDER ++04A3 ; valid # 1.1 CYRILLIC SMALL LETTER EN WITH DESCENDER ++04A4 ; mapped ; 04A5 # 1.1 CYRILLIC CAPITAL LIGATURE EN GHE ++04A5 ; valid # 1.1 CYRILLIC SMALL LIGATURE EN GHE ++04A6 ; mapped ; 04A7 # 1.1 CYRILLIC CAPITAL LETTER PE WITH MIDDLE HOOK ++04A7 ; valid # 1.1 CYRILLIC SMALL LETTER PE WITH MIDDLE HOOK ++04A8 ; mapped ; 04A9 # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN HA ++04A9 ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN HA ++04AA ; mapped ; 04AB # 1.1 CYRILLIC CAPITAL LETTER ES WITH DESCENDER ++04AB ; valid # 1.1 CYRILLIC SMALL LETTER ES WITH DESCENDER ++04AC ; mapped ; 04AD # 1.1 CYRILLIC CAPITAL LETTER TE WITH DESCENDER ++04AD ; valid # 1.1 CYRILLIC SMALL LETTER TE WITH DESCENDER ++04AE ; mapped ; 04AF # 1.1 CYRILLIC CAPITAL LETTER STRAIGHT U ++04AF ; valid # 1.1 CYRILLIC SMALL LETTER STRAIGHT U ++04B0 ; mapped ; 04B1 # 1.1 CYRILLIC CAPITAL LETTER STRAIGHT U WITH STROKE ++04B1 ; valid # 1.1 CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE ++04B2 ; mapped ; 04B3 # 1.1 CYRILLIC CAPITAL LETTER HA WITH DESCENDER ++04B3 ; valid # 1.1 CYRILLIC SMALL LETTER HA WITH DESCENDER ++04B4 ; mapped ; 04B5 # 1.1 CYRILLIC CAPITAL LIGATURE TE TSE ++04B5 ; valid # 1.1 CYRILLIC SMALL LIGATURE TE TSE ++04B6 ; mapped ; 04B7 # 1.1 CYRILLIC CAPITAL LETTER CHE WITH DESCENDER ++04B7 ; valid # 1.1 CYRILLIC SMALL LETTER CHE WITH DESCENDER ++04B8 ; mapped ; 04B9 # 1.1 CYRILLIC CAPITAL LETTER CHE WITH VERTICAL STROKE ++04B9 ; valid # 1.1 CYRILLIC SMALL LETTER CHE WITH VERTICAL STROKE ++04BA ; mapped ; 04BB # 1.1 CYRILLIC CAPITAL LETTER SHHA ++04BB ; valid # 1.1 CYRILLIC SMALL LETTER SHHA ++04BC ; mapped ; 04BD # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN CHE ++04BD ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN CHE ++04BE ; mapped ; 04BF # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN CHE WITH DESCENDER ++04BF ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN 
CHE WITH DESCENDER ++04C0 ; disallowed # 1.1 CYRILLIC LETTER PALOCHKA ++04C1 ; mapped ; 04C2 # 1.1 CYRILLIC CAPITAL LETTER ZHE WITH BREVE ++04C2 ; valid # 1.1 CYRILLIC SMALL LETTER ZHE WITH BREVE ++04C3 ; mapped ; 04C4 # 1.1 CYRILLIC CAPITAL LETTER KA WITH HOOK ++04C4 ; valid # 1.1 CYRILLIC SMALL LETTER KA WITH HOOK ++04C5 ; mapped ; 04C6 # 3.2 CYRILLIC CAPITAL LETTER EL WITH TAIL ++04C6 ; valid # 3.2 CYRILLIC SMALL LETTER EL WITH TAIL ++04C7 ; mapped ; 04C8 # 1.1 CYRILLIC CAPITAL LETTER EN WITH HOOK ++04C8 ; valid # 1.1 CYRILLIC SMALL LETTER EN WITH HOOK ++04C9 ; mapped ; 04CA # 3.2 CYRILLIC CAPITAL LETTER EN WITH TAIL ++04CA ; valid # 3.2 CYRILLIC SMALL LETTER EN WITH TAIL ++04CB ; mapped ; 04CC # 1.1 CYRILLIC CAPITAL LETTER KHAKASSIAN CHE ++04CC ; valid # 1.1 CYRILLIC SMALL LETTER KHAKASSIAN CHE ++04CD ; mapped ; 04CE # 3.2 CYRILLIC CAPITAL LETTER EM WITH TAIL ++04CE ; valid # 3.2 CYRILLIC SMALL LETTER EM WITH TAIL ++04CF ; valid # 5.0 CYRILLIC SMALL LETTER PALOCHKA ++04D0 ; mapped ; 04D1 # 1.1 CYRILLIC CAPITAL LETTER A WITH BREVE ++04D1 ; valid # 1.1 CYRILLIC SMALL LETTER A WITH BREVE ++04D2 ; mapped ; 04D3 # 1.1 CYRILLIC CAPITAL LETTER A WITH DIAERESIS ++04D3 ; valid # 1.1 CYRILLIC SMALL LETTER A WITH DIAERESIS ++04D4 ; mapped ; 04D5 # 1.1 CYRILLIC CAPITAL LIGATURE A IE ++04D5 ; valid # 1.1 CYRILLIC SMALL LIGATURE A IE ++04D6 ; mapped ; 04D7 # 1.1 CYRILLIC CAPITAL LETTER IE WITH BREVE ++04D7 ; valid # 1.1 CYRILLIC SMALL LETTER IE WITH BREVE ++04D8 ; mapped ; 04D9 # 1.1 CYRILLIC CAPITAL LETTER SCHWA ++04D9 ; valid # 1.1 CYRILLIC SMALL LETTER SCHWA ++04DA ; mapped ; 04DB # 1.1 CYRILLIC CAPITAL LETTER SCHWA WITH DIAERESIS ++04DB ; valid # 1.1 CYRILLIC SMALL LETTER SCHWA WITH DIAERESIS ++04DC ; mapped ; 04DD # 1.1 CYRILLIC CAPITAL LETTER ZHE WITH DIAERESIS ++04DD ; valid # 1.1 CYRILLIC SMALL LETTER ZHE WITH DIAERESIS ++04DE ; mapped ; 04DF # 1.1 CYRILLIC CAPITAL LETTER ZE WITH DIAERESIS ++04DF ; valid # 1.1 CYRILLIC SMALL LETTER ZE WITH DIAERESIS ++04E0 ; mapped ; 
04E1 # 1.1 CYRILLIC CAPITAL LETTER ABKHASIAN DZE ++04E1 ; valid # 1.1 CYRILLIC SMALL LETTER ABKHASIAN DZE ++04E2 ; mapped ; 04E3 # 1.1 CYRILLIC CAPITAL LETTER I WITH MACRON ++04E3 ; valid # 1.1 CYRILLIC SMALL LETTER I WITH MACRON ++04E4 ; mapped ; 04E5 # 1.1 CYRILLIC CAPITAL LETTER I WITH DIAERESIS ++04E5 ; valid # 1.1 CYRILLIC SMALL LETTER I WITH DIAERESIS ++04E6 ; mapped ; 04E7 # 1.1 CYRILLIC CAPITAL LETTER O WITH DIAERESIS ++04E7 ; valid # 1.1 CYRILLIC SMALL LETTER O WITH DIAERESIS ++04E8 ; mapped ; 04E9 # 1.1 CYRILLIC CAPITAL LETTER BARRED O ++04E9 ; valid # 1.1 CYRILLIC SMALL LETTER BARRED O ++04EA ; mapped ; 04EB # 1.1 CYRILLIC CAPITAL LETTER BARRED O WITH DIAERESIS ++04EB ; valid # 1.1 CYRILLIC SMALL LETTER BARRED O WITH DIAERESIS ++04EC ; mapped ; 04ED # 3.0 CYRILLIC CAPITAL LETTER E WITH DIAERESIS ++04ED ; valid # 3.0 CYRILLIC SMALL LETTER E WITH DIAERESIS ++04EE ; mapped ; 04EF # 1.1 CYRILLIC CAPITAL LETTER U WITH MACRON ++04EF ; valid # 1.1 CYRILLIC SMALL LETTER U WITH MACRON ++04F0 ; mapped ; 04F1 # 1.1 CYRILLIC CAPITAL LETTER U WITH DIAERESIS ++04F1 ; valid # 1.1 CYRILLIC SMALL LETTER U WITH DIAERESIS ++04F2 ; mapped ; 04F3 # 1.1 CYRILLIC CAPITAL LETTER U WITH DOUBLE ACUTE ++04F3 ; valid # 1.1 CYRILLIC SMALL LETTER U WITH DOUBLE ACUTE ++04F4 ; mapped ; 04F5 # 1.1 CYRILLIC CAPITAL LETTER CHE WITH DIAERESIS ++04F5 ; valid # 1.1 CYRILLIC SMALL LETTER CHE WITH DIAERESIS ++04F6 ; mapped ; 04F7 # 4.1 CYRILLIC CAPITAL LETTER GHE WITH DESCENDER ++04F7 ; valid # 4.1 CYRILLIC SMALL LETTER GHE WITH DESCENDER ++04F8 ; mapped ; 04F9 # 1.1 CYRILLIC CAPITAL LETTER YERU WITH DIAERESIS ++04F9 ; valid # 1.1 CYRILLIC SMALL LETTER YERU WITH DIAERESIS ++04FA ; mapped ; 04FB # 5.0 CYRILLIC CAPITAL LETTER GHE WITH STROKE AND HOOK ++04FB ; valid # 5.0 CYRILLIC SMALL LETTER GHE WITH STROKE AND HOOK ++04FC ; mapped ; 04FD # 5.0 CYRILLIC CAPITAL LETTER HA WITH HOOK ++04FD ; valid # 5.0 CYRILLIC SMALL LETTER HA WITH HOOK ++04FE ; mapped ; 04FF # 5.0 CYRILLIC CAPITAL LETTER HA 
WITH STROKE ++04FF ; valid # 5.0 CYRILLIC SMALL LETTER HA WITH STROKE ++0500 ; mapped ; 0501 # 3.2 CYRILLIC CAPITAL LETTER KOMI DE ++0501 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI DE ++0502 ; mapped ; 0503 # 3.2 CYRILLIC CAPITAL LETTER KOMI DJE ++0503 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI DJE ++0504 ; mapped ; 0505 # 3.2 CYRILLIC CAPITAL LETTER KOMI ZJE ++0505 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI ZJE ++0506 ; mapped ; 0507 # 3.2 CYRILLIC CAPITAL LETTER KOMI DZJE ++0507 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI DZJE ++0508 ; mapped ; 0509 # 3.2 CYRILLIC CAPITAL LETTER KOMI LJE ++0509 ; valid # 3.2 CYRILLIC SMALL LETTER KOMI LJE ++050A ; mapped ; 050B # 3.2 CYRILLIC CAPITAL LETTER KOMI NJE ++050B ; valid # 3.2 CYRILLIC SMALL LETTER KOMI NJE ++050C ; mapped ; 050D # 3.2 CYRILLIC CAPITAL LETTER KOMI SJE ++050D ; valid # 3.2 CYRILLIC SMALL LETTER KOMI SJE ++050E ; mapped ; 050F # 3.2 CYRILLIC CAPITAL LETTER KOMI TJE ++050F ; valid # 3.2 CYRILLIC SMALL LETTER KOMI TJE ++0510 ; mapped ; 0511 # 5.0 CYRILLIC CAPITAL LETTER REVERSED ZE ++0511 ; valid # 5.0 CYRILLIC SMALL LETTER REVERSED ZE ++0512 ; mapped ; 0513 # 5.0 CYRILLIC CAPITAL LETTER EL WITH HOOK ++0513 ; valid # 5.0 CYRILLIC SMALL LETTER EL WITH HOOK ++0514 ; mapped ; 0515 # 5.1 CYRILLIC CAPITAL LETTER LHA ++0515 ; valid # 5.1 CYRILLIC SMALL LETTER LHA ++0516 ; mapped ; 0517 # 5.1 CYRILLIC CAPITAL LETTER RHA ++0517 ; valid # 5.1 CYRILLIC SMALL LETTER RHA ++0518 ; mapped ; 0519 # 5.1 CYRILLIC CAPITAL LETTER YAE ++0519 ; valid # 5.1 CYRILLIC SMALL LETTER YAE ++051A ; mapped ; 051B # 5.1 CYRILLIC CAPITAL LETTER QA ++051B ; valid # 5.1 CYRILLIC SMALL LETTER QA ++051C ; mapped ; 051D # 5.1 CYRILLIC CAPITAL LETTER WE ++051D ; valid # 5.1 CYRILLIC SMALL LETTER WE ++051E ; mapped ; 051F # 5.1 CYRILLIC CAPITAL LETTER ALEUT KA ++051F ; valid # 5.1 CYRILLIC SMALL LETTER ALEUT KA ++0520 ; mapped ; 0521 # 5.1 CYRILLIC CAPITAL LETTER EL WITH MIDDLE HOOK ++0521 ; valid # 5.1 CYRILLIC SMALL LETTER EL WITH MIDDLE HOOK ++0522 ; 
mapped ; 0523 # 5.1 CYRILLIC CAPITAL LETTER EN WITH MIDDLE HOOK ++0523 ; valid # 5.1 CYRILLIC SMALL LETTER EN WITH MIDDLE HOOK ++0524 ; mapped ; 0525 # 5.2 CYRILLIC CAPITAL LETTER PE WITH DESCENDER ++0525 ; valid # 5.2 CYRILLIC SMALL LETTER PE WITH DESCENDER ++0526 ; mapped ; 0527 # 6.0 CYRILLIC CAPITAL LETTER SHHA WITH DESCENDER ++0527 ; valid # 6.0 CYRILLIC SMALL LETTER SHHA WITH DESCENDER ++0528 ; mapped ; 0529 # 7.0 CYRILLIC CAPITAL LETTER EN WITH LEFT HOOK ++0529 ; valid # 7.0 CYRILLIC SMALL LETTER EN WITH LEFT HOOK ++052A ; mapped ; 052B # 7.0 CYRILLIC CAPITAL LETTER DZZHE ++052B ; valid # 7.0 CYRILLIC SMALL LETTER DZZHE ++052C ; mapped ; 052D # 7.0 CYRILLIC CAPITAL LETTER DCHE ++052D ; valid # 7.0 CYRILLIC SMALL LETTER DCHE ++052E ; mapped ; 052F # 7.0 CYRILLIC CAPITAL LETTER EL WITH DESCENDER ++052F ; valid # 7.0 CYRILLIC SMALL LETTER EL WITH DESCENDER ++0530 ; disallowed # NA ++0531 ; mapped ; 0561 # 1.1 ARMENIAN CAPITAL LETTER AYB ++0532 ; mapped ; 0562 # 1.1 ARMENIAN CAPITAL LETTER BEN ++0533 ; mapped ; 0563 # 1.1 ARMENIAN CAPITAL LETTER GIM ++0534 ; mapped ; 0564 # 1.1 ARMENIAN CAPITAL LETTER DA ++0535 ; mapped ; 0565 # 1.1 ARMENIAN CAPITAL LETTER ECH ++0536 ; mapped ; 0566 # 1.1 ARMENIAN CAPITAL LETTER ZA ++0537 ; mapped ; 0567 # 1.1 ARMENIAN CAPITAL LETTER EH ++0538 ; mapped ; 0568 # 1.1 ARMENIAN CAPITAL LETTER ET ++0539 ; mapped ; 0569 # 1.1 ARMENIAN CAPITAL LETTER TO ++053A ; mapped ; 056A # 1.1 ARMENIAN CAPITAL LETTER ZHE ++053B ; mapped ; 056B # 1.1 ARMENIAN CAPITAL LETTER INI ++053C ; mapped ; 056C # 1.1 ARMENIAN CAPITAL LETTER LIWN ++053D ; mapped ; 056D # 1.1 ARMENIAN CAPITAL LETTER XEH ++053E ; mapped ; 056E # 1.1 ARMENIAN CAPITAL LETTER CA ++053F ; mapped ; 056F # 1.1 ARMENIAN CAPITAL LETTER KEN ++0540 ; mapped ; 0570 # 1.1 ARMENIAN CAPITAL LETTER HO ++0541 ; mapped ; 0571 # 1.1 ARMENIAN CAPITAL LETTER JA ++0542 ; mapped ; 0572 # 1.1 ARMENIAN CAPITAL LETTER GHAD ++0543 ; mapped ; 0573 # 1.1 ARMENIAN CAPITAL LETTER CHEH ++0544 ; mapped ; 0574 
# 1.1 ARMENIAN CAPITAL LETTER MEN ++0545 ; mapped ; 0575 # 1.1 ARMENIAN CAPITAL LETTER YI ++0546 ; mapped ; 0576 # 1.1 ARMENIAN CAPITAL LETTER NOW ++0547 ; mapped ; 0577 # 1.1 ARMENIAN CAPITAL LETTER SHA ++0548 ; mapped ; 0578 # 1.1 ARMENIAN CAPITAL LETTER VO ++0549 ; mapped ; 0579 # 1.1 ARMENIAN CAPITAL LETTER CHA ++054A ; mapped ; 057A # 1.1 ARMENIAN CAPITAL LETTER PEH ++054B ; mapped ; 057B # 1.1 ARMENIAN CAPITAL LETTER JHEH ++054C ; mapped ; 057C # 1.1 ARMENIAN CAPITAL LETTER RA ++054D ; mapped ; 057D # 1.1 ARMENIAN CAPITAL LETTER SEH ++054E ; mapped ; 057E # 1.1 ARMENIAN CAPITAL LETTER VEW ++054F ; mapped ; 057F # 1.1 ARMENIAN CAPITAL LETTER TIWN ++0550 ; mapped ; 0580 # 1.1 ARMENIAN CAPITAL LETTER REH ++0551 ; mapped ; 0581 # 1.1 ARMENIAN CAPITAL LETTER CO ++0552 ; mapped ; 0582 # 1.1 ARMENIAN CAPITAL LETTER YIWN ++0553 ; mapped ; 0583 # 1.1 ARMENIAN CAPITAL LETTER PIWR ++0554 ; mapped ; 0584 # 1.1 ARMENIAN CAPITAL LETTER KEH ++0555 ; mapped ; 0585 # 1.1 ARMENIAN CAPITAL LETTER OH ++0556 ; mapped ; 0586 # 1.1 ARMENIAN CAPITAL LETTER FEH ++0557..0558 ; disallowed # NA .. ++0559 ; valid # 1.1 ARMENIAN MODIFIER LETTER LEFT HALF RING ++055A..055F ; valid ; ; NV8 # 1.1 ARMENIAN APOSTROPHE..ARMENIAN ABBREVIATION MARK ++0560 ; disallowed # NA ++0561..0586 ; valid # 1.1 ARMENIAN SMALL LETTER AYB..ARMENIAN SMALL LETTER FEH ++0587 ; mapped ; 0565 0582 # 1.1 ARMENIAN SMALL LIGATURE ECH YIWN ++0588 ; disallowed # NA ++0589 ; valid ; ; NV8 # 1.1 ARMENIAN FULL STOP ++058A ; valid ; ; NV8 # 3.0 ARMENIAN HYPHEN ++058B..058C ; disallowed # NA .. 
++058D..058E ; valid ; ; NV8 # 7.0 RIGHT-FACING ARMENIAN ETERNITY SIGN..LEFT-FACING ARMENIAN ETERNITY SIGN ++058F ; valid ; ; NV8 # 6.1 ARMENIAN DRAM SIGN ++0590 ; disallowed # NA ++0591..05A1 ; valid # 2.0 HEBREW ACCENT ETNAHTA..HEBREW ACCENT PAZER ++05A2 ; valid # 4.1 HEBREW ACCENT ATNAH HAFUKH ++05A3..05AF ; valid # 2.0 HEBREW ACCENT MUNAH..HEBREW MARK MASORA CIRCLE ++05B0..05B9 ; valid # 1.1 HEBREW POINT SHEVA..HEBREW POINT HOLAM ++05BA ; valid # 5.0 HEBREW POINT HOLAM HASER FOR VAV ++05BB..05BD ; valid # 1.1 HEBREW POINT QUBUTS..HEBREW POINT METEG ++05BE ; valid ; ; NV8 # 1.1 HEBREW PUNCTUATION MAQAF ++05BF ; valid # 1.1 HEBREW POINT RAFE ++05C0 ; valid ; ; NV8 # 1.1 HEBREW PUNCTUATION PASEQ ++05C1..05C2 ; valid # 1.1 HEBREW POINT SHIN DOT..HEBREW POINT SIN DOT ++05C3 ; valid ; ; NV8 # 1.1 HEBREW PUNCTUATION SOF PASUQ ++05C4 ; valid # 2.0 HEBREW MARK UPPER DOT ++05C5 ; valid # 4.1 HEBREW MARK LOWER DOT ++05C6 ; valid ; ; NV8 # 4.1 HEBREW PUNCTUATION NUN HAFUKHA ++05C7 ; valid # 4.1 HEBREW POINT QAMATS QATAN ++05C8..05CF ; disallowed # NA .. ++05D0..05EA ; valid # 1.1 HEBREW LETTER ALEF..HEBREW LETTER TAV ++05EB..05EF ; disallowed # NA .. ++05F0..05F4 ; valid # 1.1 HEBREW LIGATURE YIDDISH DOUBLE VAV..HEBREW PUNCTUATION GERSHAYIM ++05F5..05FF ; disallowed # NA .. 
++0600..0603 ; disallowed # 4.0 ARABIC NUMBER SIGN..ARABIC SIGN SAFHA ++0604 ; disallowed # 6.1 ARABIC SIGN SAMVAT ++0605 ; disallowed # 7.0 ARABIC NUMBER MARK ABOVE ++0606..060A ; valid ; ; NV8 # 5.1 ARABIC-INDIC CUBE ROOT..ARABIC-INDIC PER TEN THOUSAND SIGN ++060B ; valid ; ; NV8 # 4.1 AFGHANI SIGN ++060C ; valid ; ; NV8 # 1.1 ARABIC COMMA ++060D..060F ; valid ; ; NV8 # 4.0 ARABIC DATE SEPARATOR..ARABIC SIGN MISRA ++0610..0615 ; valid # 4.0 ARABIC SIGN SALLALLAHOU ALAYHE WASSALLAM..ARABIC SMALL HIGH TAH ++0616..061A ; valid # 5.1 ARABIC SMALL HIGH LIGATURE ALEF WITH LAM WITH YEH..ARABIC SMALL KASRA ++061B ; valid ; ; NV8 # 1.1 ARABIC SEMICOLON ++061C ; disallowed # 6.3 ARABIC LETTER MARK ++061D ; disallowed # NA ++061E ; valid ; ; NV8 # 4.1 ARABIC TRIPLE DOT PUNCTUATION MARK ++061F ; valid ; ; NV8 # 1.1 ARABIC QUESTION MARK ++0620 ; valid # 6.0 ARABIC LETTER KASHMIRI YEH ++0621..063A ; valid # 1.1 ARABIC LETTER HAMZA..ARABIC LETTER GHAIN ++063B..063F ; valid # 5.1 ARABIC LETTER KEHEH WITH TWO DOTS ABOVE..ARABIC LETTER FARSI YEH WITH THREE DOTS ABOVE ++0640 ; valid ; ; NV8 # 1.1 ARABIC TATWEEL ++0641..0652 ; valid # 1.1 ARABIC LETTER FEH..ARABIC SUKUN ++0653..0655 ; valid # 3.0 ARABIC MADDAH ABOVE..ARABIC HAMZA BELOW ++0656..0658 ; valid # 4.0 ARABIC SUBSCRIPT ALEF..ARABIC MARK NOON GHUNNA ++0659..065E ; valid # 4.1 ARABIC ZWARAKAY..ARABIC FATHA WITH TWO DOTS ++065F ; valid # 6.0 ARABIC WAVY HAMZA BELOW ++0660..0669 ; valid # 1.1 ARABIC-INDIC DIGIT ZERO..ARABIC-INDIC DIGIT NINE ++066A..066D ; valid ; ; NV8 # 1.1 ARABIC PERCENT SIGN..ARABIC FIVE POINTED STAR ++066E..066F ; valid # 3.2 ARABIC LETTER DOTLESS BEH..ARABIC LETTER DOTLESS QAF ++0670..0674 ; valid # 1.1 ARABIC LETTER SUPERSCRIPT ALEF..ARABIC LETTER HIGH HAMZA ++0675 ; mapped ; 0627 0674 # 1.1 ARABIC LETTER HIGH HAMZA ALEF ++0676 ; mapped ; 0648 0674 # 1.1 ARABIC LETTER HIGH HAMZA WAW ++0677 ; mapped ; 06C7 0674 # 1.1 ARABIC LETTER U WITH HAMZA ABOVE ++0678 ; mapped ; 064A 0674 # 1.1 ARABIC LETTER HIGH 
HAMZA YEH ++0679..06B7 ; valid # 1.1 ARABIC LETTER TTEH..ARABIC LETTER LAM WITH THREE DOTS ABOVE ++06B8..06B9 ; valid # 3.0 ARABIC LETTER LAM WITH THREE DOTS BELOW..ARABIC LETTER NOON WITH DOT BELOW ++06BA..06BE ; valid # 1.1 ARABIC LETTER NOON GHUNNA..ARABIC LETTER HEH DOACHASHMEE ++06BF ; valid # 3.0 ARABIC LETTER TCHEH WITH DOT ABOVE ++06C0..06CE ; valid # 1.1 ARABIC LETTER HEH WITH YEH ABOVE..ARABIC LETTER YEH WITH SMALL V ++06CF ; valid # 3.0 ARABIC LETTER WAW WITH DOT ABOVE ++06D0..06D3 ; valid # 1.1 ARABIC LETTER E..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE ++06D4 ; valid ; ; NV8 # 1.1 ARABIC FULL STOP ++06D5..06DC ; valid # 1.1 ARABIC LETTER AE..ARABIC SMALL HIGH SEEN ++06DD ; disallowed # 1.1 ARABIC END OF AYAH ++06DE ; valid ; ; NV8 # 1.1 ARABIC START OF RUB EL HIZB ++06DF..06E8 ; valid # 1.1 ARABIC SMALL HIGH ROUNDED ZERO..ARABIC SMALL HIGH NOON ++06E9 ; valid ; ; NV8 # 1.1 ARABIC PLACE OF SAJDAH ++06EA..06ED ; valid # 1.1 ARABIC EMPTY CENTRE LOW STOP..ARABIC SMALL LOW MEEM ++06EE..06EF ; valid # 4.0 ARABIC LETTER DAL WITH INVERTED V..ARABIC LETTER REH WITH INVERTED V ++06F0..06F9 ; valid # 1.1 EXTENDED ARABIC-INDIC DIGIT ZERO..EXTENDED ARABIC-INDIC DIGIT NINE ++06FA..06FE ; valid # 3.0 ARABIC LETTER SHEEN WITH DOT BELOW..ARABIC SIGN SINDHI POSTPOSITION MEN ++06FF ; valid # 4.0 ARABIC LETTER HEH WITH INVERTED V ++0700..070D ; valid ; ; NV8 # 3.0 SYRIAC END OF PARAGRAPH..SYRIAC HARKLEAN ASTERISCUS ++070E ; disallowed # NA ++070F ; disallowed # 3.0 SYRIAC ABBREVIATION MARK ++0710..072C ; valid # 3.0 SYRIAC LETTER ALAPH..SYRIAC LETTER TAW ++072D..072F ; valid # 4.0 SYRIAC LETTER PERSIAN BHETH..SYRIAC LETTER PERSIAN DHALATH ++0730..074A ; valid # 3.0 SYRIAC PTHAHA ABOVE..SYRIAC BARREKH ++074B..074C ; disallowed # NA .. 
++074D..074F ; valid # 4.0 SYRIAC LETTER SOGDIAN ZHAIN..SYRIAC LETTER SOGDIAN FE ++0750..076D ; valid # 4.1 ARABIC LETTER BEH WITH THREE DOTS HORIZONTALLY BELOW..ARABIC LETTER SEEN WITH TWO DOTS VERTICALLY ABOVE ++076E..077F ; valid # 5.1 ARABIC LETTER HAH WITH SMALL ARABIC LETTER TAH BELOW..ARABIC LETTER KAF WITH TWO DOTS ABOVE ++0780..07B0 ; valid # 3.0 THAANA LETTER HAA..THAANA SUKUN ++07B1 ; valid # 3.2 THAANA LETTER NAA ++07B2..07BF ; disallowed # NA .. ++07C0..07F5 ; valid # 5.0 NKO DIGIT ZERO..NKO LOW TONE APOSTROPHE ++07F6..07FA ; valid ; ; NV8 # 5.0 NKO SYMBOL OO DENNEN..NKO LAJANYALAN ++07FB..07FF ; disallowed # NA .. ++0800..082D ; valid # 5.2 SAMARITAN LETTER ALAF..SAMARITAN MARK NEQUDAA ++082E..082F ; disallowed # NA .. ++0830..083E ; valid ; ; NV8 # 5.2 SAMARITAN PUNCTUATION NEQUDAA..SAMARITAN PUNCTUATION ANNAAU ++083F ; disallowed # NA ++0840..085B ; valid # 6.0 MANDAIC LETTER HALQA..MANDAIC GEMINATION MARK ++085C..085D ; disallowed # NA .. ++085E ; valid ; ; NV8 # 6.0 MANDAIC PUNCTUATION ++085F ; disallowed # NA ++0860..086A ; valid # 10.0 SYRIAC LETTER MALAYALAM NGA..SYRIAC LETTER MALAYALAM SSA ++086B..089F ; disallowed # NA .. ++08A0 ; valid # 6.1 ARABIC LETTER BEH WITH SMALL V BELOW ++08A1 ; valid # 7.0 ARABIC LETTER BEH WITH HAMZA ABOVE ++08A2..08AC ; valid # 6.1 ARABIC LETTER JEEM WITH TWO DOTS ABOVE..ARABIC LETTER ROHINGYA YEH ++08AD..08B2 ; valid # 7.0 ARABIC LETTER LOW ALEF..ARABIC LETTER ZAIN WITH INVERTED V ABOVE ++08B3..08B4 ; valid # 8.0 ARABIC LETTER AIN WITH THREE DOTS BELOW..ARABIC LETTER KAF WITH DOT BELOW ++08B5 ; disallowed # NA ++08B6..08BD ; valid # 9.0 ARABIC LETTER BEH WITH SMALL MEEM ABOVE..ARABIC LETTER AFRICAN NOON ++08BE..08D3 ; disallowed # NA .. 
++08D4..08E1 ; valid # 9.0 ARABIC SMALL HIGH WORD AR-RUB..ARABIC SMALL HIGH SIGN SAFHA ++08E2 ; disallowed # 9.0 ARABIC DISPUTED END OF AYAH ++08E3 ; valid # 8.0 ARABIC TURNED DAMMA BELOW ++08E4..08FE ; valid # 6.1 ARABIC CURLY FATHA..ARABIC DAMMA WITH DOT ++08FF ; valid # 7.0 ARABIC MARK SIDEWAYS NOON GHUNNA ++0900 ; valid # 5.2 DEVANAGARI SIGN INVERTED CANDRABINDU ++0901..0903 ; valid # 1.1 DEVANAGARI SIGN CANDRABINDU..DEVANAGARI SIGN VISARGA ++0904 ; valid # 4.0 DEVANAGARI LETTER SHORT A ++0905..0939 ; valid # 1.1 DEVANAGARI LETTER A..DEVANAGARI LETTER HA ++093A..093B ; valid # 6.0 DEVANAGARI VOWEL SIGN OE..DEVANAGARI VOWEL SIGN OOE ++093C..094D ; valid # 1.1 DEVANAGARI SIGN NUKTA..DEVANAGARI SIGN VIRAMA ++094E ; valid # 5.2 DEVANAGARI VOWEL SIGN PRISHTHAMATRA E ++094F ; valid # 6.0 DEVANAGARI VOWEL SIGN AW ++0950..0954 ; valid # 1.1 DEVANAGARI OM..DEVANAGARI ACUTE ACCENT ++0955 ; valid # 5.2 DEVANAGARI VOWEL SIGN CANDRA LONG E ++0956..0957 ; valid # 6.0 DEVANAGARI VOWEL SIGN UE..DEVANAGARI VOWEL SIGN UUE ++0958 ; mapped ; 0915 093C # 1.1 DEVANAGARI LETTER QA ++0959 ; mapped ; 0916 093C # 1.1 DEVANAGARI LETTER KHHA ++095A ; mapped ; 0917 093C # 1.1 DEVANAGARI LETTER GHHA ++095B ; mapped ; 091C 093C # 1.1 DEVANAGARI LETTER ZA ++095C ; mapped ; 0921 093C # 1.1 DEVANAGARI LETTER DDDHA ++095D ; mapped ; 0922 093C # 1.1 DEVANAGARI LETTER RHA ++095E ; mapped ; 092B 093C # 1.1 DEVANAGARI LETTER FA ++095F ; mapped ; 092F 093C # 1.1 DEVANAGARI LETTER YYA ++0960..0963 ; valid # 1.1 DEVANAGARI LETTER VOCALIC RR..DEVANAGARI VOWEL SIGN VOCALIC LL ++0964..0965 ; valid ; ; NV8 # 1.1 DEVANAGARI DANDA..DEVANAGARI DOUBLE DANDA ++0966..096F ; valid # 1.1 DEVANAGARI DIGIT ZERO..DEVANAGARI DIGIT NINE ++0970 ; valid ; ; NV8 # 1.1 DEVANAGARI ABBREVIATION SIGN ++0971..0972 ; valid # 5.1 DEVANAGARI SIGN HIGH SPACING DOT..DEVANAGARI LETTER CANDRA A ++0973..0977 ; valid # 6.0 DEVANAGARI LETTER OE..DEVANAGARI LETTER UUE ++0978 ; valid # 7.0 DEVANAGARI LETTER MARWARI DDA ++0979..097A ; 
valid # 5.2 DEVANAGARI LETTER ZHA..DEVANAGARI LETTER HEAVY YA ++097B..097C ; valid # 5.0 DEVANAGARI LETTER GGA..DEVANAGARI LETTER JJA ++097D ; valid # 4.1 DEVANAGARI LETTER GLOTTAL STOP ++097E..097F ; valid # 5.0 DEVANAGARI LETTER DDDA..DEVANAGARI LETTER BBA ++0980 ; valid # 7.0 BENGALI ANJI ++0981..0983 ; valid # 1.1 BENGALI SIGN CANDRABINDU..BENGALI SIGN VISARGA ++0984 ; disallowed # NA ++0985..098C ; valid # 1.1 BENGALI LETTER A..BENGALI LETTER VOCALIC L ++098D..098E ; disallowed # NA .. ++098F..0990 ; valid # 1.1 BENGALI LETTER E..BENGALI LETTER AI ++0991..0992 ; disallowed # NA .. ++0993..09A8 ; valid # 1.1 BENGALI LETTER O..BENGALI LETTER NA ++09A9 ; disallowed # NA ++09AA..09B0 ; valid # 1.1 BENGALI LETTER PA..BENGALI LETTER RA ++09B1 ; disallowed # NA ++09B2 ; valid # 1.1 BENGALI LETTER LA ++09B3..09B5 ; disallowed # NA .. ++09B6..09B9 ; valid # 1.1 BENGALI LETTER SHA..BENGALI LETTER HA ++09BA..09BB ; disallowed # NA .. ++09BC ; valid # 1.1 BENGALI SIGN NUKTA ++09BD ; valid # 4.0 BENGALI SIGN AVAGRAHA ++09BE..09C4 ; valid # 1.1 BENGALI VOWEL SIGN AA..BENGALI VOWEL SIGN VOCALIC RR ++09C5..09C6 ; disallowed # NA .. ++09C7..09C8 ; valid # 1.1 BENGALI VOWEL SIGN E..BENGALI VOWEL SIGN AI ++09C9..09CA ; disallowed # NA .. ++09CB..09CD ; valid # 1.1 BENGALI VOWEL SIGN O..BENGALI SIGN VIRAMA ++09CE ; valid # 4.1 BENGALI LETTER KHANDA TA ++09CF..09D6 ; disallowed # NA .. ++09D7 ; valid # 1.1 BENGALI AU LENGTH MARK ++09D8..09DB ; disallowed # NA .. ++09DC ; mapped ; 09A1 09BC # 1.1 BENGALI LETTER RRA ++09DD ; mapped ; 09A2 09BC # 1.1 BENGALI LETTER RHA ++09DE ; disallowed # NA ++09DF ; mapped ; 09AF 09BC # 1.1 BENGALI LETTER YYA ++09E0..09E3 ; valid # 1.1 BENGALI LETTER VOCALIC RR..BENGALI VOWEL SIGN VOCALIC LL ++09E4..09E5 ; disallowed # NA .. 
++09E6..09F1 ; valid # 1.1 BENGALI DIGIT ZERO..BENGALI LETTER RA WITH LOWER DIAGONAL ++09F2..09FA ; valid ; ; NV8 # 1.1 BENGALI RUPEE MARK..BENGALI ISSHAR ++09FB ; valid ; ; NV8 # 5.2 BENGALI GANDA MARK ++09FC ; valid # 10.0 BENGALI LETTER VEDIC ANUSVARA ++09FD ; valid ; ; NV8 # 10.0 BENGALI ABBREVIATION SIGN ++09FE..0A00 ; disallowed # NA .. ++0A01 ; valid # 4.0 GURMUKHI SIGN ADAK BINDI ++0A02 ; valid # 1.1 GURMUKHI SIGN BINDI ++0A03 ; valid # 4.0 GURMUKHI SIGN VISARGA ++0A04 ; disallowed # NA ++0A05..0A0A ; valid # 1.1 GURMUKHI LETTER A..GURMUKHI LETTER UU ++0A0B..0A0E ; disallowed # NA .. ++0A0F..0A10 ; valid # 1.1 GURMUKHI LETTER EE..GURMUKHI LETTER AI ++0A11..0A12 ; disallowed # NA .. ++0A13..0A28 ; valid # 1.1 GURMUKHI LETTER OO..GURMUKHI LETTER NA ++0A29 ; disallowed # NA ++0A2A..0A30 ; valid # 1.1 GURMUKHI LETTER PA..GURMUKHI LETTER RA ++0A31 ; disallowed # NA ++0A32 ; valid # 1.1 GURMUKHI LETTER LA ++0A33 ; mapped ; 0A32 0A3C # 1.1 GURMUKHI LETTER LLA ++0A34 ; disallowed # NA ++0A35 ; valid # 1.1 GURMUKHI LETTER VA ++0A36 ; mapped ; 0A38 0A3C # 1.1 GURMUKHI LETTER SHA ++0A37 ; disallowed # NA ++0A38..0A39 ; valid # 1.1 GURMUKHI LETTER SA..GURMUKHI LETTER HA ++0A3A..0A3B ; disallowed # NA .. ++0A3C ; valid # 1.1 GURMUKHI SIGN NUKTA ++0A3D ; disallowed # NA ++0A3E..0A42 ; valid # 1.1 GURMUKHI VOWEL SIGN AA..GURMUKHI VOWEL SIGN UU ++0A43..0A46 ; disallowed # NA .. ++0A47..0A48 ; valid # 1.1 GURMUKHI VOWEL SIGN EE..GURMUKHI VOWEL SIGN AI ++0A49..0A4A ; disallowed # NA .. ++0A4B..0A4D ; valid # 1.1 GURMUKHI VOWEL SIGN OO..GURMUKHI SIGN VIRAMA ++0A4E..0A50 ; disallowed # NA .. ++0A51 ; valid # 5.1 GURMUKHI SIGN UDAAT ++0A52..0A58 ; disallowed # NA .. ++0A59 ; mapped ; 0A16 0A3C # 1.1 GURMUKHI LETTER KHHA ++0A5A ; mapped ; 0A17 0A3C # 1.1 GURMUKHI LETTER GHHA ++0A5B ; mapped ; 0A1C 0A3C # 1.1 GURMUKHI LETTER ZA ++0A5C ; valid # 1.1 GURMUKHI LETTER RRA ++0A5D ; disallowed # NA ++0A5E ; mapped ; 0A2B 0A3C # 1.1 GURMUKHI LETTER FA ++0A5F..0A65 ; disallowed # NA .. 
++0A66..0A74 ; valid # 1.1 GURMUKHI DIGIT ZERO..GURMUKHI EK ONKAR ++0A75 ; valid # 5.1 GURMUKHI SIGN YAKASH ++0A76..0A80 ; disallowed # NA .. ++0A81..0A83 ; valid # 1.1 GUJARATI SIGN CANDRABINDU..GUJARATI SIGN VISARGA ++0A84 ; disallowed # NA ++0A85..0A8B ; valid # 1.1 GUJARATI LETTER A..GUJARATI LETTER VOCALIC R ++0A8C ; valid # 4.0 GUJARATI LETTER VOCALIC L ++0A8D ; valid # 1.1 GUJARATI VOWEL CANDRA E ++0A8E ; disallowed # NA ++0A8F..0A91 ; valid # 1.1 GUJARATI LETTER E..GUJARATI VOWEL CANDRA O ++0A92 ; disallowed # NA ++0A93..0AA8 ; valid # 1.1 GUJARATI LETTER O..GUJARATI LETTER NA ++0AA9 ; disallowed # NA ++0AAA..0AB0 ; valid # 1.1 GUJARATI LETTER PA..GUJARATI LETTER RA ++0AB1 ; disallowed # NA ++0AB2..0AB3 ; valid # 1.1 GUJARATI LETTER LA..GUJARATI LETTER LLA ++0AB4 ; disallowed # NA ++0AB5..0AB9 ; valid # 1.1 GUJARATI LETTER VA..GUJARATI LETTER HA ++0ABA..0ABB ; disallowed # NA .. ++0ABC..0AC5 ; valid # 1.1 GUJARATI SIGN NUKTA..GUJARATI VOWEL SIGN CANDRA E ++0AC6 ; disallowed # NA ++0AC7..0AC9 ; valid # 1.1 GUJARATI VOWEL SIGN E..GUJARATI VOWEL SIGN CANDRA O ++0ACA ; disallowed # NA ++0ACB..0ACD ; valid # 1.1 GUJARATI VOWEL SIGN O..GUJARATI SIGN VIRAMA ++0ACE..0ACF ; disallowed # NA .. ++0AD0 ; valid # 1.1 GUJARATI OM ++0AD1..0ADF ; disallowed # NA .. ++0AE0 ; valid # 1.1 GUJARATI LETTER VOCALIC RR ++0AE1..0AE3 ; valid # 4.0 GUJARATI LETTER VOCALIC LL..GUJARATI VOWEL SIGN VOCALIC LL ++0AE4..0AE5 ; disallowed # NA .. ++0AE6..0AEF ; valid # 1.1 GUJARATI DIGIT ZERO..GUJARATI DIGIT NINE ++0AF0 ; valid ; ; NV8 # 6.1 GUJARATI ABBREVIATION SIGN ++0AF1 ; valid ; ; NV8 # 4.0 GUJARATI RUPEE SIGN ++0AF2..0AF8 ; disallowed # NA .. ++0AF9 ; valid # 8.0 GUJARATI LETTER ZHA ++0AFA..0AFF ; valid # 10.0 GUJARATI SIGN SUKUN..GUJARATI SIGN TWO-CIRCLE NUKTA ABOVE ++0B00 ; disallowed # NA ++0B01..0B03 ; valid # 1.1 ORIYA SIGN CANDRABINDU..ORIYA SIGN VISARGA ++0B04 ; disallowed # NA ++0B05..0B0C ; valid # 1.1 ORIYA LETTER A..ORIYA LETTER VOCALIC L ++0B0D..0B0E ; disallowed # NA .. 
++0B0F..0B10 ; valid # 1.1 ORIYA LETTER E..ORIYA LETTER AI ++0B11..0B12 ; disallowed # NA .. ++0B13..0B28 ; valid # 1.1 ORIYA LETTER O..ORIYA LETTER NA ++0B29 ; disallowed # NA ++0B2A..0B30 ; valid # 1.1 ORIYA LETTER PA..ORIYA LETTER RA ++0B31 ; disallowed # NA ++0B32..0B33 ; valid # 1.1 ORIYA LETTER LA..ORIYA LETTER LLA ++0B34 ; disallowed # NA ++0B35 ; valid # 4.0 ORIYA LETTER VA ++0B36..0B39 ; valid # 1.1 ORIYA LETTER SHA..ORIYA LETTER HA ++0B3A..0B3B ; disallowed # NA .. ++0B3C..0B43 ; valid # 1.1 ORIYA SIGN NUKTA..ORIYA VOWEL SIGN VOCALIC R ++0B44 ; valid # 5.1 ORIYA VOWEL SIGN VOCALIC RR ++0B45..0B46 ; disallowed # NA .. ++0B47..0B48 ; valid # 1.1 ORIYA VOWEL SIGN E..ORIYA VOWEL SIGN AI ++0B49..0B4A ; disallowed # NA .. ++0B4B..0B4D ; valid # 1.1 ORIYA VOWEL SIGN O..ORIYA SIGN VIRAMA ++0B4E..0B55 ; disallowed # NA .. ++0B56..0B57 ; valid # 1.1 ORIYA AI LENGTH MARK..ORIYA AU LENGTH MARK ++0B58..0B5B ; disallowed # NA .. ++0B5C ; mapped ; 0B21 0B3C # 1.1 ORIYA LETTER RRA ++0B5D ; mapped ; 0B22 0B3C # 1.1 ORIYA LETTER RHA ++0B5E ; disallowed # NA ++0B5F..0B61 ; valid # 1.1 ORIYA LETTER YYA..ORIYA LETTER VOCALIC LL ++0B62..0B63 ; valid # 5.1 ORIYA VOWEL SIGN VOCALIC L..ORIYA VOWEL SIGN VOCALIC LL ++0B64..0B65 ; disallowed # NA .. ++0B66..0B6F ; valid # 1.1 ORIYA DIGIT ZERO..ORIYA DIGIT NINE ++0B70 ; valid ; ; NV8 # 1.1 ORIYA ISSHAR ++0B71 ; valid # 4.0 ORIYA LETTER WA ++0B72..0B77 ; valid ; ; NV8 # 6.0 ORIYA FRACTION ONE QUARTER..ORIYA FRACTION THREE SIXTEENTHS ++0B78..0B81 ; disallowed # NA .. ++0B82..0B83 ; valid # 1.1 TAMIL SIGN ANUSVARA..TAMIL SIGN VISARGA ++0B84 ; disallowed # NA ++0B85..0B8A ; valid # 1.1 TAMIL LETTER A..TAMIL LETTER UU ++0B8B..0B8D ; disallowed # NA .. ++0B8E..0B90 ; valid # 1.1 TAMIL LETTER E..TAMIL LETTER AI ++0B91 ; disallowed # NA ++0B92..0B95 ; valid # 1.1 TAMIL LETTER O..TAMIL LETTER KA ++0B96..0B98 ; disallowed # NA .. 
++0B99..0B9A ; valid # 1.1 TAMIL LETTER NGA..TAMIL LETTER CA ++0B9B ; disallowed # NA ++0B9C ; valid # 1.1 TAMIL LETTER JA ++0B9D ; disallowed # NA ++0B9E..0B9F ; valid # 1.1 TAMIL LETTER NYA..TAMIL LETTER TTA ++0BA0..0BA2 ; disallowed # NA .. ++0BA3..0BA4 ; valid # 1.1 TAMIL LETTER NNA..TAMIL LETTER TA ++0BA5..0BA7 ; disallowed # NA .. ++0BA8..0BAA ; valid # 1.1 TAMIL LETTER NA..TAMIL LETTER PA ++0BAB..0BAD ; disallowed # NA .. ++0BAE..0BB5 ; valid # 1.1 TAMIL LETTER MA..TAMIL LETTER VA ++0BB6 ; valid # 4.1 TAMIL LETTER SHA ++0BB7..0BB9 ; valid # 1.1 TAMIL LETTER SSA..TAMIL LETTER HA ++0BBA..0BBD ; disallowed # NA .. ++0BBE..0BC2 ; valid # 1.1 TAMIL VOWEL SIGN AA..TAMIL VOWEL SIGN UU ++0BC3..0BC5 ; disallowed # NA .. ++0BC6..0BC8 ; valid # 1.1 TAMIL VOWEL SIGN E..TAMIL VOWEL SIGN AI ++0BC9 ; disallowed # NA ++0BCA..0BCD ; valid # 1.1 TAMIL VOWEL SIGN O..TAMIL SIGN VIRAMA ++0BCE..0BCF ; disallowed # NA .. ++0BD0 ; valid # 5.1 TAMIL OM ++0BD1..0BD6 ; disallowed # NA .. ++0BD7 ; valid # 1.1 TAMIL AU LENGTH MARK ++0BD8..0BE5 ; disallowed # NA .. ++0BE6 ; valid # 4.1 TAMIL DIGIT ZERO ++0BE7..0BEF ; valid # 1.1 TAMIL DIGIT ONE..TAMIL DIGIT NINE ++0BF0..0BF2 ; valid ; ; NV8 # 1.1 TAMIL NUMBER TEN..TAMIL NUMBER ONE THOUSAND ++0BF3..0BFA ; valid ; ; NV8 # 4.0 TAMIL DAY SIGN..TAMIL NUMBER SIGN ++0BFB..0BFF ; disallowed # NA .. ++0C00 ; valid # 7.0 TELUGU SIGN COMBINING CANDRABINDU ABOVE ++0C01..0C03 ; valid # 1.1 TELUGU SIGN CANDRABINDU..TELUGU SIGN VISARGA ++0C04 ; disallowed # NA ++0C05..0C0C ; valid # 1.1 TELUGU LETTER A..TELUGU LETTER VOCALIC L ++0C0D ; disallowed # NA ++0C0E..0C10 ; valid # 1.1 TELUGU LETTER E..TELUGU LETTER AI ++0C11 ; disallowed # NA ++0C12..0C28 ; valid # 1.1 TELUGU LETTER O..TELUGU LETTER NA ++0C29 ; disallowed # NA ++0C2A..0C33 ; valid # 1.1 TELUGU LETTER PA..TELUGU LETTER LLA ++0C34 ; valid # 7.0 TELUGU LETTER LLLA ++0C35..0C39 ; valid # 1.1 TELUGU LETTER VA..TELUGU LETTER HA ++0C3A..0C3C ; disallowed # NA .. 
++0C3D ; valid # 5.1 TELUGU SIGN AVAGRAHA ++0C3E..0C44 ; valid # 1.1 TELUGU VOWEL SIGN AA..TELUGU VOWEL SIGN VOCALIC RR ++0C45 ; disallowed # NA ++0C46..0C48 ; valid # 1.1 TELUGU VOWEL SIGN E..TELUGU VOWEL SIGN AI ++0C49 ; disallowed # NA ++0C4A..0C4D ; valid # 1.1 TELUGU VOWEL SIGN O..TELUGU SIGN VIRAMA ++0C4E..0C54 ; disallowed # NA .. ++0C55..0C56 ; valid # 1.1 TELUGU LENGTH MARK..TELUGU AI LENGTH MARK ++0C57 ; disallowed # NA ++0C58..0C59 ; valid # 5.1 TELUGU LETTER TSA..TELUGU LETTER DZA ++0C5A ; valid # 8.0 TELUGU LETTER RRRA ++0C5B..0C5F ; disallowed # NA .. ++0C60..0C61 ; valid # 1.1 TELUGU LETTER VOCALIC RR..TELUGU LETTER VOCALIC LL ++0C62..0C63 ; valid # 5.1 TELUGU VOWEL SIGN VOCALIC L..TELUGU VOWEL SIGN VOCALIC LL ++0C64..0C65 ; disallowed # NA .. ++0C66..0C6F ; valid # 1.1 TELUGU DIGIT ZERO..TELUGU DIGIT NINE ++0C70..0C77 ; disallowed # NA .. ++0C78..0C7F ; valid ; ; NV8 # 5.1 TELUGU FRACTION DIGIT ZERO FOR ODD POWERS OF FOUR..TELUGU SIGN TUUMU ++0C80 ; valid # 9.0 KANNADA SIGN SPACING CANDRABINDU ++0C81 ; valid # 7.0 KANNADA SIGN CANDRABINDU ++0C82..0C83 ; valid # 1.1 KANNADA SIGN ANUSVARA..KANNADA SIGN VISARGA ++0C84 ; disallowed # NA ++0C85..0C8C ; valid # 1.1 KANNADA LETTER A..KANNADA LETTER VOCALIC L ++0C8D ; disallowed # NA ++0C8E..0C90 ; valid # 1.1 KANNADA LETTER E..KANNADA LETTER AI ++0C91 ; disallowed # NA ++0C92..0CA8 ; valid # 1.1 KANNADA LETTER O..KANNADA LETTER NA ++0CA9 ; disallowed # NA ++0CAA..0CB3 ; valid # 1.1 KANNADA LETTER PA..KANNADA LETTER LLA ++0CB4 ; disallowed # NA ++0CB5..0CB9 ; valid # 1.1 KANNADA LETTER VA..KANNADA LETTER HA ++0CBA..0CBB ; disallowed # NA .. ++0CBC..0CBD ; valid # 4.0 KANNADA SIGN NUKTA..KANNADA SIGN AVAGRAHA ++0CBE..0CC4 ; valid # 1.1 KANNADA VOWEL SIGN AA..KANNADA VOWEL SIGN VOCALIC RR ++0CC5 ; disallowed # NA ++0CC6..0CC8 ; valid # 1.1 KANNADA VOWEL SIGN E..KANNADA VOWEL SIGN AI ++0CC9 ; disallowed # NA ++0CCA..0CCD ; valid # 1.1 KANNADA VOWEL SIGN O..KANNADA SIGN VIRAMA ++0CCE..0CD4 ; disallowed # NA .. 
++0CD5..0CD6 ; valid # 1.1 KANNADA LENGTH MARK..KANNADA AI LENGTH MARK ++0CD7..0CDD ; disallowed # NA .. ++0CDE ; valid # 1.1 KANNADA LETTER FA ++0CDF ; disallowed # NA ++0CE0..0CE1 ; valid # 1.1 KANNADA LETTER VOCALIC RR..KANNADA LETTER VOCALIC LL ++0CE2..0CE3 ; valid # 5.0 KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL ++0CE4..0CE5 ; disallowed # NA .. ++0CE6..0CEF ; valid # 1.1 KANNADA DIGIT ZERO..KANNADA DIGIT NINE ++0CF0 ; disallowed # NA ++0CF1..0CF2 ; valid # 5.0 KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA ++0CF3..0CFF ; disallowed # NA .. ++0D00 ; valid # 10.0 MALAYALAM SIGN COMBINING ANUSVARA ABOVE ++0D01 ; valid # 7.0 MALAYALAM SIGN CANDRABINDU ++0D02..0D03 ; valid # 1.1 MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA ++0D04 ; disallowed # NA ++0D05..0D0C ; valid # 1.1 MALAYALAM LETTER A..MALAYALAM LETTER VOCALIC L ++0D0D ; disallowed # NA ++0D0E..0D10 ; valid # 1.1 MALAYALAM LETTER E..MALAYALAM LETTER AI ++0D11 ; disallowed # NA ++0D12..0D28 ; valid # 1.1 MALAYALAM LETTER O..MALAYALAM LETTER NA ++0D29 ; valid # 6.0 MALAYALAM LETTER NNNA ++0D2A..0D39 ; valid # 1.1 MALAYALAM LETTER PA..MALAYALAM LETTER HA ++0D3A ; valid # 6.0 MALAYALAM LETTER TTTA ++0D3B..0D3C ; valid # 10.0 MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA ++0D3D ; valid # 5.1 MALAYALAM SIGN AVAGRAHA ++0D3E..0D43 ; valid # 1.1 MALAYALAM VOWEL SIGN AA..MALAYALAM VOWEL SIGN VOCALIC R ++0D44 ; valid # 5.1 MALAYALAM VOWEL SIGN VOCALIC RR ++0D45 ; disallowed # NA ++0D46..0D48 ; valid # 1.1 MALAYALAM VOWEL SIGN E..MALAYALAM VOWEL SIGN AI ++0D49 ; disallowed # NA ++0D4A..0D4D ; valid # 1.1 MALAYALAM VOWEL SIGN O..MALAYALAM SIGN VIRAMA ++0D4E ; valid # 6.0 MALAYALAM LETTER DOT REPH ++0D4F ; valid ; ; NV8 # 9.0 MALAYALAM SIGN PARA ++0D50..0D53 ; disallowed # NA .. 
++0D54..0D56 ; valid # 9.0 MALAYALAM LETTER CHILLU M..MALAYALAM LETTER CHILLU LLL ++0D57 ; valid # 1.1 MALAYALAM AU LENGTH MARK ++0D58..0D5E ; valid ; ; NV8 # 9.0 MALAYALAM FRACTION ONE ONE-HUNDRED-AND-SIXTIETH..MALAYALAM FRACTION ONE FIFTH ++0D5F ; valid # 8.0 MALAYALAM LETTER ARCHAIC II ++0D60..0D61 ; valid # 1.1 MALAYALAM LETTER VOCALIC RR..MALAYALAM LETTER VOCALIC LL ++0D62..0D63 ; valid # 5.1 MALAYALAM VOWEL SIGN VOCALIC L..MALAYALAM VOWEL SIGN VOCALIC LL ++0D64..0D65 ; disallowed # NA .. ++0D66..0D6F ; valid # 1.1 MALAYALAM DIGIT ZERO..MALAYALAM DIGIT NINE ++0D70..0D75 ; valid ; ; NV8 # 5.1 MALAYALAM NUMBER TEN..MALAYALAM FRACTION THREE QUARTERS ++0D76..0D78 ; valid ; ; NV8 # 9.0 MALAYALAM FRACTION ONE SIXTEENTH..MALAYALAM FRACTION THREE SIXTEENTHS ++0D79 ; valid ; ; NV8 # 5.1 MALAYALAM DATE MARK ++0D7A..0D7F ; valid # 5.1 MALAYALAM LETTER CHILLU NN..MALAYALAM LETTER CHILLU K ++0D80..0D81 ; disallowed # NA .. ++0D82..0D83 ; valid # 3.0 SINHALA SIGN ANUSVARAYA..SINHALA SIGN VISARGAYA ++0D84 ; disallowed # NA ++0D85..0D96 ; valid # 3.0 SINHALA LETTER AYANNA..SINHALA LETTER AUYANNA ++0D97..0D99 ; disallowed # NA .. ++0D9A..0DB1 ; valid # 3.0 SINHALA LETTER ALPAPRAANA KAYANNA..SINHALA LETTER DANTAJA NAYANNA ++0DB2 ; disallowed # NA ++0DB3..0DBB ; valid # 3.0 SINHALA LETTER SANYAKA DAYANNA..SINHALA LETTER RAYANNA ++0DBC ; disallowed # NA ++0DBD ; valid # 3.0 SINHALA LETTER DANTAJA LAYANNA ++0DBE..0DBF ; disallowed # NA .. ++0DC0..0DC6 ; valid # 3.0 SINHALA LETTER VAYANNA..SINHALA LETTER FAYANNA ++0DC7..0DC9 ; disallowed # NA .. ++0DCA ; valid # 3.0 SINHALA SIGN AL-LAKUNA ++0DCB..0DCE ; disallowed # NA .. ++0DCF..0DD4 ; valid # 3.0 SINHALA VOWEL SIGN AELA-PILLA..SINHALA VOWEL SIGN KETTI PAA-PILLA ++0DD5 ; disallowed # NA ++0DD6 ; valid # 3.0 SINHALA VOWEL SIGN DIGA PAA-PILLA ++0DD7 ; disallowed # NA ++0DD8..0DDF ; valid # 3.0 SINHALA VOWEL SIGN GAETTA-PILLA..SINHALA VOWEL SIGN GAYANUKITTA ++0DE0..0DE5 ; disallowed # NA .. 
++0DE6..0DEF ; valid # 7.0 SINHALA LITH DIGIT ZERO..SINHALA LITH DIGIT NINE ++0DF0..0DF1 ; disallowed # NA .. ++0DF2..0DF3 ; valid # 3.0 SINHALA VOWEL SIGN DIGA GAETTA-PILLA..SINHALA VOWEL SIGN DIGA GAYANUKITTA ++0DF4 ; valid ; ; NV8 # 3.0 SINHALA PUNCTUATION KUNDDALIYA ++0DF5..0E00 ; disallowed # NA .. ++0E01..0E32 ; valid # 1.1 THAI CHARACTER KO KAI..THAI CHARACTER SARA AA ++0E33 ; mapped ; 0E4D 0E32 # 1.1 THAI CHARACTER SARA AM ++0E34..0E3A ; valid # 1.1 THAI CHARACTER SARA I..THAI CHARACTER PHINTHU ++0E3B..0E3E ; disallowed # NA .. ++0E3F ; valid ; ; NV8 # 1.1 THAI CURRENCY SYMBOL BAHT ++0E40..0E4E ; valid # 1.1 THAI CHARACTER SARA E..THAI CHARACTER YAMAKKAN ++0E4F ; valid ; ; NV8 # 1.1 THAI CHARACTER FONGMAN ++0E50..0E59 ; valid # 1.1 THAI DIGIT ZERO..THAI DIGIT NINE ++0E5A..0E5B ; valid ; ; NV8 # 1.1 THAI CHARACTER ANGKHANKHU..THAI CHARACTER KHOMUT ++0E5C..0E80 ; disallowed # NA .. ++0E81..0E82 ; valid # 1.1 LAO LETTER KO..LAO LETTER KHO SUNG ++0E83 ; disallowed # NA ++0E84 ; valid # 1.1 LAO LETTER KHO TAM ++0E85..0E86 ; disallowed # NA .. ++0E87..0E88 ; valid # 1.1 LAO LETTER NGO..LAO LETTER CO ++0E89 ; disallowed # NA ++0E8A ; valid # 1.1 LAO LETTER SO TAM ++0E8B..0E8C ; disallowed # NA .. ++0E8D ; valid # 1.1 LAO LETTER NYO ++0E8E..0E93 ; disallowed # NA .. ++0E94..0E97 ; valid # 1.1 LAO LETTER DO..LAO LETTER THO TAM ++0E98 ; disallowed # NA ++0E99..0E9F ; valid # 1.1 LAO LETTER NO..LAO LETTER FO SUNG ++0EA0 ; disallowed # NA ++0EA1..0EA3 ; valid # 1.1 LAO LETTER MO..LAO LETTER LO LING ++0EA4 ; disallowed # NA ++0EA5 ; valid # 1.1 LAO LETTER LO LOOT ++0EA6 ; disallowed # NA ++0EA7 ; valid # 1.1 LAO LETTER WO ++0EA8..0EA9 ; disallowed # NA .. 
++0EAA..0EAB ; valid # 1.1 LAO LETTER SO SUNG..LAO LETTER HO SUNG ++0EAC ; disallowed # NA ++0EAD..0EB2 ; valid # 1.1 LAO LETTER O..LAO VOWEL SIGN AA ++0EB3 ; mapped ; 0ECD 0EB2 # 1.1 LAO VOWEL SIGN AM ++0EB4..0EB9 ; valid # 1.1 LAO VOWEL SIGN I..LAO VOWEL SIGN UU ++0EBA ; disallowed # NA ++0EBB..0EBD ; valid # 1.1 LAO VOWEL SIGN MAI KON..LAO SEMIVOWEL SIGN NYO ++0EBE..0EBF ; disallowed # NA .. ++0EC0..0EC4 ; valid # 1.1 LAO VOWEL SIGN E..LAO VOWEL SIGN AI ++0EC5 ; disallowed # NA ++0EC6 ; valid # 1.1 LAO KO LA ++0EC7 ; disallowed # NA ++0EC8..0ECD ; valid # 1.1 LAO TONE MAI EK..LAO NIGGAHITA ++0ECE..0ECF ; disallowed # NA .. ++0ED0..0ED9 ; valid # 1.1 LAO DIGIT ZERO..LAO DIGIT NINE ++0EDA..0EDB ; disallowed # NA .. ++0EDC ; mapped ; 0EAB 0E99 # 1.1 LAO HO NO ++0EDD ; mapped ; 0EAB 0EA1 # 1.1 LAO HO MO ++0EDE..0EDF ; valid # 6.1 LAO LETTER KHMU GO..LAO LETTER KHMU NYO ++0EE0..0EFF ; disallowed # NA .. ++0F00 ; valid # 2.0 TIBETAN SYLLABLE OM ++0F01..0F0A ; valid ; ; NV8 # 2.0 TIBETAN MARK GTER YIG MGO TRUNCATED A..TIBETAN MARK BKA- SHOG YIG MGO ++0F0B ; valid # 2.0 TIBETAN MARK INTERSYLLABIC TSHEG ++0F0C ; mapped ; 0F0B # 2.0 TIBETAN MARK DELIMITER TSHEG BSTAR ++0F0D..0F17 ; valid ; ; NV8 # 2.0 TIBETAN MARK SHAD..TIBETAN ASTROLOGICAL SIGN SGRA GCAN -CHAR RTAGS ++0F18..0F19 ; valid # 2.0 TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS ++0F1A..0F1F ; valid ; ; NV8 # 2.0 TIBETAN SIGN RDEL DKAR GCIG..TIBETAN SIGN RDEL DKAR RDEL NAG ++0F20..0F29 ; valid # 2.0 TIBETAN DIGIT ZERO..TIBETAN DIGIT NINE ++0F2A..0F34 ; valid ; ; NV8 # 2.0 TIBETAN DIGIT HALF ONE..TIBETAN MARK BSDUS RTAGS ++0F35 ; valid # 2.0 TIBETAN MARK NGAS BZUNG NYI ZLA ++0F36 ; valid ; ; NV8 # 2.0 TIBETAN MARK CARET -DZUD RTAGS BZHI MIG CAN ++0F37 ; valid # 2.0 TIBETAN MARK NGAS BZUNG SGOR RTAGS ++0F38 ; valid ; ; NV8 # 2.0 TIBETAN MARK CHE MGO ++0F39 ; valid # 2.0 TIBETAN MARK TSA -PHRU ++0F3A..0F3D ; valid ; ; NV8 # 2.0 TIBETAN MARK GUG RTAGS GYON..TIBETAN MARK ANG KHANG GYAS 
++0F3E..0F42 ; valid # 2.0 TIBETAN SIGN YAR TSHES..TIBETAN LETTER GA ++0F43 ; mapped ; 0F42 0FB7 # 2.0 TIBETAN LETTER GHA ++0F44..0F47 ; valid # 2.0 TIBETAN LETTER NGA..TIBETAN LETTER JA ++0F48 ; disallowed # NA ++0F49..0F4C ; valid # 2.0 TIBETAN LETTER NYA..TIBETAN LETTER DDA ++0F4D ; mapped ; 0F4C 0FB7 # 2.0 TIBETAN LETTER DDHA ++0F4E..0F51 ; valid # 2.0 TIBETAN LETTER NNA..TIBETAN LETTER DA ++0F52 ; mapped ; 0F51 0FB7 # 2.0 TIBETAN LETTER DHA ++0F53..0F56 ; valid # 2.0 TIBETAN LETTER NA..TIBETAN LETTER BA ++0F57 ; mapped ; 0F56 0FB7 # 2.0 TIBETAN LETTER BHA ++0F58..0F5B ; valid # 2.0 TIBETAN LETTER MA..TIBETAN LETTER DZA ++0F5C ; mapped ; 0F5B 0FB7 # 2.0 TIBETAN LETTER DZHA ++0F5D..0F68 ; valid # 2.0 TIBETAN LETTER WA..TIBETAN LETTER A ++0F69 ; mapped ; 0F40 0FB5 # 2.0 TIBETAN LETTER KSSA ++0F6A ; valid # 3.0 TIBETAN LETTER FIXED-FORM RA ++0F6B..0F6C ; valid # 5.1 TIBETAN LETTER KKA..TIBETAN LETTER RRA ++0F6D..0F70 ; disallowed # NA .. ++0F71..0F72 ; valid # 2.0 TIBETAN VOWEL SIGN AA..TIBETAN VOWEL SIGN I ++0F73 ; mapped ; 0F71 0F72 # 2.0 TIBETAN VOWEL SIGN II ++0F74 ; valid # 2.0 TIBETAN VOWEL SIGN U ++0F75 ; mapped ; 0F71 0F74 # 2.0 TIBETAN VOWEL SIGN UU ++0F76 ; mapped ; 0FB2 0F80 # 2.0 TIBETAN VOWEL SIGN VOCALIC R ++0F77 ; mapped ; 0FB2 0F71 0F80 #2.0 TIBETAN VOWEL SIGN VOCALIC RR ++0F78 ; mapped ; 0FB3 0F80 # 2.0 TIBETAN VOWEL SIGN VOCALIC L ++0F79 ; mapped ; 0FB3 0F71 0F80 #2.0 TIBETAN VOWEL SIGN VOCALIC LL ++0F7A..0F80 ; valid # 2.0 TIBETAN VOWEL SIGN E..TIBETAN VOWEL SIGN REVERSED I ++0F81 ; mapped ; 0F71 0F80 # 2.0 TIBETAN VOWEL SIGN REVERSED II ++0F82..0F84 ; valid # 2.0 TIBETAN SIGN NYI ZLA NAA DA..TIBETAN MARK HALANTA ++0F85 ; valid ; ; NV8 # 2.0 TIBETAN MARK PALUTA ++0F86..0F8B ; valid # 2.0 TIBETAN SIGN LCI RTAGS..TIBETAN SIGN GRU MED RGYINGS ++0F8C..0F8F ; valid # 6.0 TIBETAN SIGN INVERTED MCHU CAN..TIBETAN SUBJOINED SIGN INVERTED MCHU CAN ++0F90..0F92 ; valid # 2.0 TIBETAN SUBJOINED LETTER KA..TIBETAN SUBJOINED LETTER GA ++0F93 ; mapped ; 0F92 
0FB7 # 2.0 TIBETAN SUBJOINED LETTER GHA ++0F94..0F95 ; valid # 2.0 TIBETAN SUBJOINED LETTER NGA..TIBETAN SUBJOINED LETTER CA ++0F96 ; valid # 3.0 TIBETAN SUBJOINED LETTER CHA ++0F97 ; valid # 2.0 TIBETAN SUBJOINED LETTER JA ++0F98 ; disallowed # NA ++0F99..0F9C ; valid # 2.0 TIBETAN SUBJOINED LETTER NYA..TIBETAN SUBJOINED LETTER DDA ++0F9D ; mapped ; 0F9C 0FB7 # 2.0 TIBETAN SUBJOINED LETTER DDHA ++0F9E..0FA1 ; valid # 2.0 TIBETAN SUBJOINED LETTER NNA..TIBETAN SUBJOINED LETTER DA ++0FA2 ; mapped ; 0FA1 0FB7 # 2.0 TIBETAN SUBJOINED LETTER DHA ++0FA3..0FA6 ; valid # 2.0 TIBETAN SUBJOINED LETTER NA..TIBETAN SUBJOINED LETTER BA ++0FA7 ; mapped ; 0FA6 0FB7 # 2.0 TIBETAN SUBJOINED LETTER BHA ++0FA8..0FAB ; valid # 2.0 TIBETAN SUBJOINED LETTER MA..TIBETAN SUBJOINED LETTER DZA ++0FAC ; mapped ; 0FAB 0FB7 # 2.0 TIBETAN SUBJOINED LETTER DZHA ++0FAD ; valid # 2.0 TIBETAN SUBJOINED LETTER WA ++0FAE..0FB0 ; valid # 3.0 TIBETAN SUBJOINED LETTER ZHA..TIBETAN SUBJOINED LETTER -A ++0FB1..0FB7 ; valid # 2.0 TIBETAN SUBJOINED LETTER YA..TIBETAN SUBJOINED LETTER HA ++0FB8 ; valid # 3.0 TIBETAN SUBJOINED LETTER A ++0FB9 ; mapped ; 0F90 0FB5 # 2.0 TIBETAN SUBJOINED LETTER KSSA ++0FBA..0FBC ; valid # 3.0 TIBETAN SUBJOINED LETTER FIXED-FORM WA..TIBETAN SUBJOINED LETTER FIXED-FORM RA ++0FBD ; disallowed # NA ++0FBE..0FC5 ; valid ; ; NV8 # 3.0 TIBETAN KU RU KHA..TIBETAN SYMBOL RDO RJE ++0FC6 ; valid # 3.0 TIBETAN SYMBOL PADMA GDAN ++0FC7..0FCC ; valid ; ; NV8 # 3.0 TIBETAN SYMBOL RDO RJE RGYA GRAM..TIBETAN SYMBOL NOR BU BZHI -KHYIL ++0FCD ; disallowed # NA ++0FCE ; valid ; ; NV8 # 5.1 TIBETAN SIGN RDEL NAG RDEL DKAR ++0FCF ; valid ; ; NV8 # 3.0 TIBETAN SIGN RDEL NAG GSUM ++0FD0..0FD1 ; valid ; ; NV8 # 4.1 TIBETAN MARK BSKA- SHOG GI MGO RGYAN..TIBETAN MARK MNYAM YIG GI MGO RGYAN ++0FD2..0FD4 ; valid ; ; NV8 # 5.1 TIBETAN MARK NYIS TSHEG..TIBETAN MARK CLOSING BRDA RNYING YIG MGO SGAB MA ++0FD5..0FD8 ; valid ; ; NV8 # 5.2 RIGHT-FACING SVASTI SIGN..LEFT-FACING SVASTI SIGN WITH DOTS ++0FD9..0FDA 
; valid ; ; NV8 # 6.0 TIBETAN MARK LEADING MCHAN RTAGS..TIBETAN MARK TRAILING MCHAN RTAGS ++0FDB..0FFF ; disallowed # NA .. ++1000..1021 ; valid # 3.0 MYANMAR LETTER KA..MYANMAR LETTER A ++1022 ; valid # 5.1 MYANMAR LETTER SHAN A ++1023..1027 ; valid # 3.0 MYANMAR LETTER I..MYANMAR LETTER E ++1028 ; valid # 5.1 MYANMAR LETTER MON E ++1029..102A ; valid # 3.0 MYANMAR LETTER O..MYANMAR LETTER AU ++102B ; valid # 5.1 MYANMAR VOWEL SIGN TALL AA ++102C..1032 ; valid # 3.0 MYANMAR VOWEL SIGN AA..MYANMAR VOWEL SIGN AI ++1033..1035 ; valid # 5.1 MYANMAR VOWEL SIGN MON II..MYANMAR VOWEL SIGN E ABOVE ++1036..1039 ; valid # 3.0 MYANMAR SIGN ANUSVARA..MYANMAR SIGN VIRAMA ++103A..103F ; valid # 5.1 MYANMAR SIGN ASAT..MYANMAR LETTER GREAT SA ++1040..1049 ; valid # 3.0 MYANMAR DIGIT ZERO..MYANMAR DIGIT NINE ++104A..104F ; valid ; ; NV8 # 3.0 MYANMAR SIGN LITTLE SECTION..MYANMAR SYMBOL GENITIVE ++1050..1059 ; valid # 3.0 MYANMAR LETTER SHA..MYANMAR VOWEL SIGN VOCALIC LL ++105A..1099 ; valid # 5.1 MYANMAR LETTER MON NGA..MYANMAR SHAN DIGIT NINE ++109A..109D ; valid # 5.2 MYANMAR SIGN KHAMTI TONE-1..MYANMAR VOWEL SIGN AITON AI ++109E..109F ; valid ; ; NV8 # 5.1 MYANMAR SYMBOL SHAN ONE..MYANMAR SYMBOL SHAN EXCLAMATION ++10A0..10C5 ; disallowed # 1.1 GEORGIAN CAPITAL LETTER AN..GEORGIAN CAPITAL LETTER HOE ++10C6 ; disallowed # NA ++10C7 ; mapped ; 2D27 # 6.1 GEORGIAN CAPITAL LETTER YN ++10C8..10CC ; disallowed # NA .. ++10CD ; mapped ; 2D2D # 6.1 GEORGIAN CAPITAL LETTER AEN ++10CE..10CF ; disallowed # NA .. 
++10D0..10F6 ; valid # 1.1 GEORGIAN LETTER AN..GEORGIAN LETTER FI ++10F7..10F8 ; valid # 3.2 GEORGIAN LETTER YN..GEORGIAN LETTER ELIFI ++10F9..10FA ; valid # 4.1 GEORGIAN LETTER TURNED GAN..GEORGIAN LETTER AIN ++10FB ; valid ; ; NV8 # 1.1 GEORGIAN PARAGRAPH SEPARATOR ++10FC ; mapped ; 10DC # 4.1 MODIFIER LETTER GEORGIAN NAR ++10FD..10FF ; valid # 6.1 GEORGIAN LETTER AEN..GEORGIAN LETTER LABIAL SIGN ++1100..1159 ; valid ; ; NV8 # 1.1 HANGUL CHOSEONG KIYEOK..HANGUL CHOSEONG YEORINHIEUH ++115A..115E ; valid ; ; NV8 # 5.2 HANGUL CHOSEONG KIYEOK-TIKEUT..HANGUL CHOSEONG TIKEUT-RIEUL ++115F..1160 ; disallowed # 1.1 HANGUL CHOSEONG FILLER..HANGUL JUNGSEONG FILLER ++1161..11A2 ; valid ; ; NV8 # 1.1 HANGUL JUNGSEONG A..HANGUL JUNGSEONG SSANGARAEA ++11A3..11A7 ; valid ; ; NV8 # 5.2 HANGUL JUNGSEONG A-EU..HANGUL JUNGSEONG O-YAE ++11A8..11F9 ; valid ; ; NV8 # 1.1 HANGUL JONGSEONG KIYEOK..HANGUL JONGSEONG YEORINHIEUH ++11FA..11FF ; valid ; ; NV8 # 5.2 HANGUL JONGSEONG KIYEOK-NIEUN..HANGUL JONGSEONG SSANGNIEUN ++1200..1206 ; valid # 3.0 ETHIOPIC SYLLABLE HA..ETHIOPIC SYLLABLE HO ++1207 ; valid # 4.1 ETHIOPIC SYLLABLE HOA ++1208..1246 ; valid # 3.0 ETHIOPIC SYLLABLE LA..ETHIOPIC SYLLABLE QO ++1247 ; valid # 4.1 ETHIOPIC SYLLABLE QOA ++1248 ; valid # 3.0 ETHIOPIC SYLLABLE QWA ++1249 ; disallowed # NA ++124A..124D ; valid # 3.0 ETHIOPIC SYLLABLE QWI..ETHIOPIC SYLLABLE QWE ++124E..124F ; disallowed # NA .. ++1250..1256 ; valid # 3.0 ETHIOPIC SYLLABLE QHA..ETHIOPIC SYLLABLE QHO ++1257 ; disallowed # NA ++1258 ; valid # 3.0 ETHIOPIC SYLLABLE QHWA ++1259 ; disallowed # NA ++125A..125D ; valid # 3.0 ETHIOPIC SYLLABLE QHWI..ETHIOPIC SYLLABLE QHWE ++125E..125F ; disallowed # NA .. ++1260..1286 ; valid # 3.0 ETHIOPIC SYLLABLE BA..ETHIOPIC SYLLABLE XO ++1287 ; valid # 4.1 ETHIOPIC SYLLABLE XOA ++1288 ; valid # 3.0 ETHIOPIC SYLLABLE XWA ++1289 ; disallowed # NA ++128A..128D ; valid # 3.0 ETHIOPIC SYLLABLE XWI..ETHIOPIC SYLLABLE XWE ++128E..128F ; disallowed # NA .. 
++1290..12AE ; valid # 3.0 ETHIOPIC SYLLABLE NA..ETHIOPIC SYLLABLE KO ++12AF ; valid # 4.1 ETHIOPIC SYLLABLE KOA ++12B0 ; valid # 3.0 ETHIOPIC SYLLABLE KWA ++12B1 ; disallowed # NA ++12B2..12B5 ; valid # 3.0 ETHIOPIC SYLLABLE KWI..ETHIOPIC SYLLABLE KWE ++12B6..12B7 ; disallowed # NA .. ++12B8..12BE ; valid # 3.0 ETHIOPIC SYLLABLE KXA..ETHIOPIC SYLLABLE KXO ++12BF ; disallowed # NA ++12C0 ; valid # 3.0 ETHIOPIC SYLLABLE KXWA ++12C1 ; disallowed # NA ++12C2..12C5 ; valid # 3.0 ETHIOPIC SYLLABLE KXWI..ETHIOPIC SYLLABLE KXWE ++12C6..12C7 ; disallowed # NA .. ++12C8..12CE ; valid # 3.0 ETHIOPIC SYLLABLE WA..ETHIOPIC SYLLABLE WO ++12CF ; valid # 4.1 ETHIOPIC SYLLABLE WOA ++12D0..12D6 ; valid # 3.0 ETHIOPIC SYLLABLE PHARYNGEAL A..ETHIOPIC SYLLABLE PHARYNGEAL O ++12D7 ; disallowed # NA ++12D8..12EE ; valid # 3.0 ETHIOPIC SYLLABLE ZA..ETHIOPIC SYLLABLE YO ++12EF ; valid # 4.1 ETHIOPIC SYLLABLE YOA ++12F0..130E ; valid # 3.0 ETHIOPIC SYLLABLE DA..ETHIOPIC SYLLABLE GO ++130F ; valid # 4.1 ETHIOPIC SYLLABLE GOA ++1310 ; valid # 3.0 ETHIOPIC SYLLABLE GWA ++1311 ; disallowed # NA ++1312..1315 ; valid # 3.0 ETHIOPIC SYLLABLE GWI..ETHIOPIC SYLLABLE GWE ++1316..1317 ; disallowed # NA .. ++1318..131E ; valid # 3.0 ETHIOPIC SYLLABLE GGA..ETHIOPIC SYLLABLE GGO ++131F ; valid # 4.1 ETHIOPIC SYLLABLE GGWAA ++1320..1346 ; valid # 3.0 ETHIOPIC SYLLABLE THA..ETHIOPIC SYLLABLE TZO ++1347 ; valid # 4.1 ETHIOPIC SYLLABLE TZOA ++1348..135A ; valid # 3.0 ETHIOPIC SYLLABLE FA..ETHIOPIC SYLLABLE FYA ++135B..135C ; disallowed # NA .. ++135D..135E ; valid # 6.0 ETHIOPIC COMBINING GEMINATION AND VOWEL LENGTH MARK..ETHIOPIC COMBINING VOWEL LENGTH MARK ++135F ; valid # 4.1 ETHIOPIC COMBINING GEMINATION MARK ++1360 ; valid ; ; NV8 # 4.1 ETHIOPIC SECTION MARK ++1361..137C ; valid ; ; NV8 # 3.0 ETHIOPIC WORDSPACE..ETHIOPIC NUMBER TEN THOUSAND ++137D..137F ; disallowed # NA .. 
++1380..138F ; valid # 4.1 ETHIOPIC SYLLABLE SEBATBEIT MWA..ETHIOPIC SYLLABLE PWE ++1390..1399 ; valid ; ; NV8 # 4.1 ETHIOPIC TONAL MARK YIZET..ETHIOPIC TONAL MARK KURT ++139A..139F ; disallowed # NA .. ++13A0..13F4 ; valid # 3.0 CHEROKEE LETTER A..CHEROKEE LETTER YV ++13F5 ; valid # 8.0 CHEROKEE LETTER MV ++13F6..13F7 ; disallowed # NA .. ++13F8 ; mapped ; 13F0 # 8.0 CHEROKEE SMALL LETTER YE ++13F9 ; mapped ; 13F1 # 8.0 CHEROKEE SMALL LETTER YI ++13FA ; mapped ; 13F2 # 8.0 CHEROKEE SMALL LETTER YO ++13FB ; mapped ; 13F3 # 8.0 CHEROKEE SMALL LETTER YU ++13FC ; mapped ; 13F4 # 8.0 CHEROKEE SMALL LETTER YV ++13FD ; mapped ; 13F5 # 8.0 CHEROKEE SMALL LETTER MV ++13FE..13FF ; disallowed # NA .. ++1400 ; valid ; ; NV8 # 5.2 CANADIAN SYLLABICS HYPHEN ++1401..166C ; valid # 3.0 CANADIAN SYLLABICS E..CANADIAN SYLLABICS CARRIER TTSA ++166D..166E ; valid ; ; NV8 # 3.0 CANADIAN SYLLABICS CHI SIGN..CANADIAN SYLLABICS FULL STOP ++166F..1676 ; valid # 3.0 CANADIAN SYLLABICS QAI..CANADIAN SYLLABICS NNGAA ++1677..167F ; valid # 5.2 CANADIAN SYLLABICS WOODS-CREE THWEE..CANADIAN SYLLABICS BLACKFOOT W ++1680 ; disallowed # 3.0 OGHAM SPACE MARK ++1681..169A ; valid # 3.0 OGHAM LETTER BEITH..OGHAM LETTER PEITH ++169B..169C ; valid ; ; NV8 # 3.0 OGHAM FEATHER MARK..OGHAM REVERSED FEATHER MARK ++169D..169F ; disallowed # NA .. ++16A0..16EA ; valid # 3.0 RUNIC LETTER FEHU FEOH FE F..RUNIC LETTER X ++16EB..16F0 ; valid ; ; NV8 # 3.0 RUNIC SINGLE PUNCTUATION..RUNIC BELGTHOR SYMBOL ++16F1..16F8 ; valid # 7.0 RUNIC LETTER K..RUNIC LETTER FRANKS CASKET AESC ++16F9..16FF ; disallowed # NA .. ++1700..170C ; valid # 3.2 TAGALOG LETTER A..TAGALOG LETTER YA ++170D ; disallowed # NA ++170E..1714 ; valid # 3.2 TAGALOG LETTER LA..TAGALOG SIGN VIRAMA ++1715..171F ; disallowed # NA .. ++1720..1734 ; valid # 3.2 HANUNOO LETTER A..HANUNOO SIGN PAMUDPOD ++1735..1736 ; valid ; ; NV8 # 3.2 PHILIPPINE SINGLE PUNCTUATION..PHILIPPINE DOUBLE PUNCTUATION ++1737..173F ; disallowed # NA .. 
++1740..1753 ; valid # 3.2 BUHID LETTER A..BUHID VOWEL SIGN U ++1754..175F ; disallowed # NA .. ++1760..176C ; valid # 3.2 TAGBANWA LETTER A..TAGBANWA LETTER YA ++176D ; disallowed # NA ++176E..1770 ; valid # 3.2 TAGBANWA LETTER LA..TAGBANWA LETTER SA ++1771 ; disallowed # NA ++1772..1773 ; valid # 3.2 TAGBANWA VOWEL SIGN I..TAGBANWA VOWEL SIGN U ++1774..177F ; disallowed # NA .. ++1780..17B3 ; valid # 3.0 KHMER LETTER KA..KHMER INDEPENDENT VOWEL QAU ++17B4..17B5 ; disallowed # 3.0 KHMER VOWEL INHERENT AQ..KHMER VOWEL INHERENT AA ++17B6..17D3 ; valid # 3.0 KHMER VOWEL SIGN AA..KHMER SIGN BATHAMASAT ++17D4..17D6 ; valid ; ; NV8 # 3.0 KHMER SIGN KHAN..KHMER SIGN CAMNUC PII KUUH ++17D7 ; valid # 3.0 KHMER SIGN LEK TOO ++17D8..17DB ; valid ; ; NV8 # 3.0 KHMER SIGN BEYYAL..KHMER CURRENCY SYMBOL RIEL ++17DC ; valid # 3.0 KHMER SIGN AVAKRAHASANYA ++17DD ; valid # 4.0 KHMER SIGN ATTHACAN ++17DE..17DF ; disallowed # NA .. ++17E0..17E9 ; valid # 3.0 KHMER DIGIT ZERO..KHMER DIGIT NINE ++17EA..17EF ; disallowed # NA .. ++17F0..17F9 ; valid ; ; NV8 # 4.0 KHMER SYMBOL LEK ATTAK SON..KHMER SYMBOL LEK ATTAK PRAM-BUON ++17FA..17FF ; disallowed # NA .. ++1800..1805 ; valid ; ; NV8 # 3.0 MONGOLIAN BIRGA..MONGOLIAN FOUR DOTS ++1806 ; disallowed # 3.0 MONGOLIAN TODO SOFT HYPHEN ++1807..180A ; valid ; ; NV8 # 3.0 MONGOLIAN SIBE SYLLABLE BOUNDARY MARKER..MONGOLIAN NIRUGU ++180B..180D ; ignored # 3.0 MONGOLIAN FREE VARIATION SELECTOR ONE..MONGOLIAN FREE VARIATION SELECTOR THREE ++180E ; disallowed # 3.0 MONGOLIAN VOWEL SEPARATOR ++180F ; disallowed # NA ++1810..1819 ; valid # 3.0 MONGOLIAN DIGIT ZERO..MONGOLIAN DIGIT NINE ++181A..181F ; disallowed # NA .. ++1820..1877 ; valid # 3.0 MONGOLIAN LETTER A..MONGOLIAN LETTER MANCHU ZHA ++1878..187F ; disallowed # NA .. ++1880..18A9 ; valid # 3.0 MONGOLIAN LETTER ALI GALI ANUSVARA ONE..MONGOLIAN LETTER ALI GALI DAGALGA ++18AA ; valid # 5.1 MONGOLIAN LETTER MANCHU ALI GALI LHA ++18AB..18AF ; disallowed # NA .. 
++18B0..18F5 ; valid # 5.2 CANADIAN SYLLABICS OY..CANADIAN SYLLABICS CARRIER DENTAL S ++18F6..18FF ; disallowed # NA .. ++1900..191C ; valid # 4.0 LIMBU VOWEL-CARRIER LETTER..LIMBU LETTER HA ++191D..191E ; valid # 7.0 LIMBU LETTER GYAN..LIMBU LETTER TRA ++191F ; disallowed # NA ++1920..192B ; valid # 4.0 LIMBU VOWEL SIGN A..LIMBU SUBJOINED LETTER WA ++192C..192F ; disallowed # NA .. ++1930..193B ; valid # 4.0 LIMBU SMALL LETTER KA..LIMBU SIGN SA-I ++193C..193F ; disallowed # NA .. ++1940 ; valid ; ; NV8 # 4.0 LIMBU SIGN LOO ++1941..1943 ; disallowed # NA .. ++1944..1945 ; valid ; ; NV8 # 4.0 LIMBU EXCLAMATION MARK..LIMBU QUESTION MARK ++1946..196D ; valid # 4.0 LIMBU DIGIT ZERO..TAI LE LETTER AI ++196E..196F ; disallowed # NA .. ++1970..1974 ; valid # 4.0 TAI LE LETTER TONE-2..TAI LE LETTER TONE-6 ++1975..197F ; disallowed # NA .. ++1980..19A9 ; valid # 4.1 NEW TAI LUE LETTER HIGH QA..NEW TAI LUE LETTER LOW XVA ++19AA..19AB ; valid # 5.2 NEW TAI LUE LETTER HIGH SUA..NEW TAI LUE LETTER LOW SUA ++19AC..19AF ; disallowed # NA .. ++19B0..19C9 ; valid # 4.1 NEW TAI LUE VOWEL SIGN VOWEL SHORTENER..NEW TAI LUE TONE MARK-2 ++19CA..19CF ; disallowed # NA .. ++19D0..19D9 ; valid # 4.1 NEW TAI LUE DIGIT ZERO..NEW TAI LUE DIGIT NINE ++19DA ; valid ; ; XV8 # 5.2 NEW TAI LUE THAM DIGIT ONE ++19DB..19DD ; disallowed # NA .. ++19DE..19DF ; valid ; ; NV8 # 4.1 NEW TAI LUE SIGN LAE..NEW TAI LUE SIGN LAEV ++19E0..19FF ; valid ; ; NV8 # 4.0 KHMER SYMBOL PATHAMASAT..KHMER SYMBOL DAP-PRAM ROC ++1A00..1A1B ; valid # 4.1 BUGINESE LETTER KA..BUGINESE VOWEL SIGN AE ++1A1C..1A1D ; disallowed # NA .. ++1A1E..1A1F ; valid ; ; NV8 # 4.1 BUGINESE PALLAWA..BUGINESE END OF SECTION ++1A20..1A5E ; valid # 5.2 TAI THAM LETTER HIGH KA..TAI THAM CONSONANT SIGN SA ++1A5F ; disallowed # NA ++1A60..1A7C ; valid # 5.2 TAI THAM SIGN SAKOT..TAI THAM SIGN KHUEN-LUE KARAN ++1A7D..1A7E ; disallowed # NA .. 
++1A7F..1A89 ; valid # 5.2 TAI THAM COMBINING CRYPTOGRAMMIC DOT..TAI THAM HORA DIGIT NINE ++1A8A..1A8F ; disallowed # NA .. ++1A90..1A99 ; valid # 5.2 TAI THAM THAM DIGIT ZERO..TAI THAM THAM DIGIT NINE ++1A9A..1A9F ; disallowed # NA .. ++1AA0..1AA6 ; valid ; ; NV8 # 5.2 TAI THAM SIGN WIANG..TAI THAM SIGN REVERSED ROTATED RANA ++1AA7 ; valid # 5.2 TAI THAM SIGN MAI YAMOK ++1AA8..1AAD ; valid ; ; NV8 # 5.2 TAI THAM SIGN KAAN..TAI THAM SIGN CAANG ++1AAE..1AAF ; disallowed # NA .. ++1AB0..1ABD ; valid # 7.0 COMBINING DOUBLED CIRCUMFLEX ACCENT..COMBINING PARENTHESES BELOW ++1ABE ; valid ; ; NV8 # 7.0 COMBINING PARENTHESES OVERLAY ++1ABF..1AFF ; disallowed # NA .. ++1B00..1B4B ; valid # 5.0 BALINESE SIGN ULU RICEM..BALINESE LETTER ASYURA SASAK ++1B4C..1B4F ; disallowed # NA .. ++1B50..1B59 ; valid # 5.0 BALINESE DIGIT ZERO..BALINESE DIGIT NINE ++1B5A..1B6A ; valid ; ; NV8 # 5.0 BALINESE PANTI..BALINESE MUSICAL SYMBOL DANG GEDE ++1B6B..1B73 ; valid # 5.0 BALINESE MUSICAL SYMBOL COMBINING TEGEH..BALINESE MUSICAL SYMBOL COMBINING GONG ++1B74..1B7C ; valid ; ; NV8 # 5.0 BALINESE MUSICAL SYMBOL RIGHT-HAND OPEN DUG..BALINESE MUSICAL SYMBOL LEFT-HAND OPEN PING ++1B7D..1B7F ; disallowed # NA .. ++1B80..1BAA ; valid # 5.1 SUNDANESE SIGN PANYECEK..SUNDANESE SIGN PAMAAEH ++1BAB..1BAD ; valid # 6.1 SUNDANESE SIGN VIRAMA..SUNDANESE CONSONANT SIGN PASANGAN WA ++1BAE..1BB9 ; valid # 5.1 SUNDANESE LETTER KHA..SUNDANESE DIGIT NINE ++1BBA..1BBF ; valid # 6.1 SUNDANESE AVAGRAHA..SUNDANESE LETTER FINAL M ++1BC0..1BF3 ; valid # 6.0 BATAK LETTER A..BATAK PANONGONAN ++1BF4..1BFB ; disallowed # NA .. ++1BFC..1BFF ; valid ; ; NV8 # 6.0 BATAK SYMBOL BINDU NA METEK..BATAK SYMBOL BINDU PANGOLAT ++1C00..1C37 ; valid # 5.1 LEPCHA LETTER KA..LEPCHA SIGN NUKTA ++1C38..1C3A ; disallowed # NA .. ++1C3B..1C3F ; valid ; ; NV8 # 5.1 LEPCHA PUNCTUATION TA-ROL..LEPCHA PUNCTUATION TSHOOK ++1C40..1C49 ; valid # 5.1 LEPCHA DIGIT ZERO..LEPCHA DIGIT NINE ++1C4A..1C4C ; disallowed # NA .. 
++1C4D..1C7D ; valid # 5.1 LEPCHA LETTER TTA..OL CHIKI AHAD ++1C7E..1C7F ; valid ; ; NV8 # 5.1 OL CHIKI PUNCTUATION MUCAAD..OL CHIKI PUNCTUATION DOUBLE MUCAAD ++1C80 ; mapped ; 0432 # 9.0 CYRILLIC SMALL LETTER ROUNDED VE ++1C81 ; mapped ; 0434 # 9.0 CYRILLIC SMALL LETTER LONG-LEGGED DE ++1C82 ; mapped ; 043E # 9.0 CYRILLIC SMALL LETTER NARROW O ++1C83 ; mapped ; 0441 # 9.0 CYRILLIC SMALL LETTER WIDE ES ++1C84..1C85 ; mapped ; 0442 # 9.0 CYRILLIC SMALL LETTER TALL TE..CYRILLIC SMALL LETTER THREE-LEGGED TE ++1C86 ; mapped ; 044A # 9.0 CYRILLIC SMALL LETTER TALL HARD SIGN ++1C87 ; mapped ; 0463 # 9.0 CYRILLIC SMALL LETTER TALL YAT ++1C88 ; mapped ; A64B # 9.0 CYRILLIC SMALL LETTER UNBLENDED UK ++1C89..1CBF ; disallowed # NA .. ++1CC0..1CC7 ; valid ; ; NV8 # 6.1 SUNDANESE PUNCTUATION BINDU SURYA..SUNDANESE PUNCTUATION BINDU BA SATANGA ++1CC8..1CCF ; disallowed # NA .. ++1CD0..1CD2 ; valid # 5.2 VEDIC TONE KARSHANA..VEDIC TONE PRENKHA ++1CD3 ; valid ; ; NV8 # 5.2 VEDIC SIGN NIHSHVASA ++1CD4..1CF2 ; valid # 5.2 VEDIC SIGN YAJURVEDIC MIDLINE SVARITA..VEDIC SIGN ARDHAVISARGA ++1CF3..1CF6 ; valid # 6.1 VEDIC SIGN ROTATED ARDHAVISARGA..VEDIC SIGN UPADHMANIYA ++1CF7 ; valid # 10.0 VEDIC SIGN ATIKRAMA ++1CF8..1CF9 ; valid # 7.0 VEDIC TONE RING ABOVE..VEDIC TONE DOUBLE RING ABOVE ++1CFA..1CFF ; disallowed # NA .. 
++1D00..1D2B ; valid # 4.0 LATIN LETTER SMALL CAPITAL A..CYRILLIC LETTER SMALL CAPITAL EL ++1D2C ; mapped ; 0061 # 4.0 MODIFIER LETTER CAPITAL A ++1D2D ; mapped ; 00E6 # 4.0 MODIFIER LETTER CAPITAL AE ++1D2E ; mapped ; 0062 # 4.0 MODIFIER LETTER CAPITAL B ++1D2F ; valid # 4.0 MODIFIER LETTER CAPITAL BARRED B ++1D30 ; mapped ; 0064 # 4.0 MODIFIER LETTER CAPITAL D ++1D31 ; mapped ; 0065 # 4.0 MODIFIER LETTER CAPITAL E ++1D32 ; mapped ; 01DD # 4.0 MODIFIER LETTER CAPITAL REVERSED E ++1D33 ; mapped ; 0067 # 4.0 MODIFIER LETTER CAPITAL G ++1D34 ; mapped ; 0068 # 4.0 MODIFIER LETTER CAPITAL H ++1D35 ; mapped ; 0069 # 4.0 MODIFIER LETTER CAPITAL I ++1D36 ; mapped ; 006A # 4.0 MODIFIER LETTER CAPITAL J ++1D37 ; mapped ; 006B # 4.0 MODIFIER LETTER CAPITAL K ++1D38 ; mapped ; 006C # 4.0 MODIFIER LETTER CAPITAL L ++1D39 ; mapped ; 006D # 4.0 MODIFIER LETTER CAPITAL M ++1D3A ; mapped ; 006E # 4.0 MODIFIER LETTER CAPITAL N ++1D3B ; valid # 4.0 MODIFIER LETTER CAPITAL REVERSED N ++1D3C ; mapped ; 006F # 4.0 MODIFIER LETTER CAPITAL O ++1D3D ; mapped ; 0223 # 4.0 MODIFIER LETTER CAPITAL OU ++1D3E ; mapped ; 0070 # 4.0 MODIFIER LETTER CAPITAL P ++1D3F ; mapped ; 0072 # 4.0 MODIFIER LETTER CAPITAL R ++1D40 ; mapped ; 0074 # 4.0 MODIFIER LETTER CAPITAL T ++1D41 ; mapped ; 0075 # 4.0 MODIFIER LETTER CAPITAL U ++1D42 ; mapped ; 0077 # 4.0 MODIFIER LETTER CAPITAL W ++1D43 ; mapped ; 0061 # 4.0 MODIFIER LETTER SMALL A ++1D44 ; mapped ; 0250 # 4.0 MODIFIER LETTER SMALL TURNED A ++1D45 ; mapped ; 0251 # 4.0 MODIFIER LETTER SMALL ALPHA ++1D46 ; mapped ; 1D02 # 4.0 MODIFIER LETTER SMALL TURNED AE ++1D47 ; mapped ; 0062 # 4.0 MODIFIER LETTER SMALL B ++1D48 ; mapped ; 0064 # 4.0 MODIFIER LETTER SMALL D ++1D49 ; mapped ; 0065 # 4.0 MODIFIER LETTER SMALL E ++1D4A ; mapped ; 0259 # 4.0 MODIFIER LETTER SMALL SCHWA ++1D4B ; mapped ; 025B # 4.0 MODIFIER LETTER SMALL OPEN E ++1D4C ; mapped ; 025C # 4.0 MODIFIER LETTER SMALL TURNED OPEN E ++1D4D ; mapped ; 0067 # 4.0 MODIFIER LETTER SMALL G ++1D4E ; 
valid # 4.0 MODIFIER LETTER SMALL TURNED I ++1D4F ; mapped ; 006B # 4.0 MODIFIER LETTER SMALL K ++1D50 ; mapped ; 006D # 4.0 MODIFIER LETTER SMALL M ++1D51 ; mapped ; 014B # 4.0 MODIFIER LETTER SMALL ENG ++1D52 ; mapped ; 006F # 4.0 MODIFIER LETTER SMALL O ++1D53 ; mapped ; 0254 # 4.0 MODIFIER LETTER SMALL OPEN O ++1D54 ; mapped ; 1D16 # 4.0 MODIFIER LETTER SMALL TOP HALF O ++1D55 ; mapped ; 1D17 # 4.0 MODIFIER LETTER SMALL BOTTOM HALF O ++1D56 ; mapped ; 0070 # 4.0 MODIFIER LETTER SMALL P ++1D57 ; mapped ; 0074 # 4.0 MODIFIER LETTER SMALL T ++1D58 ; mapped ; 0075 # 4.0 MODIFIER LETTER SMALL U ++1D59 ; mapped ; 1D1D # 4.0 MODIFIER LETTER SMALL SIDEWAYS U ++1D5A ; mapped ; 026F # 4.0 MODIFIER LETTER SMALL TURNED M ++1D5B ; mapped ; 0076 # 4.0 MODIFIER LETTER SMALL V ++1D5C ; mapped ; 1D25 # 4.0 MODIFIER LETTER SMALL AIN ++1D5D ; mapped ; 03B2 # 4.0 MODIFIER LETTER SMALL BETA ++1D5E ; mapped ; 03B3 # 4.0 MODIFIER LETTER SMALL GREEK GAMMA ++1D5F ; mapped ; 03B4 # 4.0 MODIFIER LETTER SMALL DELTA ++1D60 ; mapped ; 03C6 # 4.0 MODIFIER LETTER SMALL GREEK PHI ++1D61 ; mapped ; 03C7 # 4.0 MODIFIER LETTER SMALL CHI ++1D62 ; mapped ; 0069 # 4.0 LATIN SUBSCRIPT SMALL LETTER I ++1D63 ; mapped ; 0072 # 4.0 LATIN SUBSCRIPT SMALL LETTER R ++1D64 ; mapped ; 0075 # 4.0 LATIN SUBSCRIPT SMALL LETTER U ++1D65 ; mapped ; 0076 # 4.0 LATIN SUBSCRIPT SMALL LETTER V ++1D66 ; mapped ; 03B2 # 4.0 GREEK SUBSCRIPT SMALL LETTER BETA ++1D67 ; mapped ; 03B3 # 4.0 GREEK SUBSCRIPT SMALL LETTER GAMMA ++1D68 ; mapped ; 03C1 # 4.0 GREEK SUBSCRIPT SMALL LETTER RHO ++1D69 ; mapped ; 03C6 # 4.0 GREEK SUBSCRIPT SMALL LETTER PHI ++1D6A ; mapped ; 03C7 # 4.0 GREEK SUBSCRIPT SMALL LETTER CHI ++1D6B ; valid # 4.0 LATIN SMALL LETTER UE ++1D6C..1D77 ; valid # 4.1 LATIN SMALL LETTER B WITH MIDDLE TILDE..LATIN SMALL LETTER TURNED G ++1D78 ; mapped ; 043D # 4.1 MODIFIER LETTER CYRILLIC EN ++1D79..1D9A ; valid # 4.1 LATIN SMALL LETTER INSULAR G..LATIN SMALL LETTER EZH WITH RETROFLEX HOOK ++1D9B ; mapped ; 0252 # 4.1 
MODIFIER LETTER SMALL TURNED ALPHA ++1D9C ; mapped ; 0063 # 4.1 MODIFIER LETTER SMALL C ++1D9D ; mapped ; 0255 # 4.1 MODIFIER LETTER SMALL C WITH CURL ++1D9E ; mapped ; 00F0 # 4.1 MODIFIER LETTER SMALL ETH ++1D9F ; mapped ; 025C # 4.1 MODIFIER LETTER SMALL REVERSED OPEN E ++1DA0 ; mapped ; 0066 # 4.1 MODIFIER LETTER SMALL F ++1DA1 ; mapped ; 025F # 4.1 MODIFIER LETTER SMALL DOTLESS J WITH STROKE ++1DA2 ; mapped ; 0261 # 4.1 MODIFIER LETTER SMALL SCRIPT G ++1DA3 ; mapped ; 0265 # 4.1 MODIFIER LETTER SMALL TURNED H ++1DA4 ; mapped ; 0268 # 4.1 MODIFIER LETTER SMALL I WITH STROKE ++1DA5 ; mapped ; 0269 # 4.1 MODIFIER LETTER SMALL IOTA ++1DA6 ; mapped ; 026A # 4.1 MODIFIER LETTER SMALL CAPITAL I ++1DA7 ; mapped ; 1D7B # 4.1 MODIFIER LETTER SMALL CAPITAL I WITH STROKE ++1DA8 ; mapped ; 029D # 4.1 MODIFIER LETTER SMALL J WITH CROSSED-TAIL ++1DA9 ; mapped ; 026D # 4.1 MODIFIER LETTER SMALL L WITH RETROFLEX HOOK ++1DAA ; mapped ; 1D85 # 4.1 MODIFIER LETTER SMALL L WITH PALATAL HOOK ++1DAB ; mapped ; 029F # 4.1 MODIFIER LETTER SMALL CAPITAL L ++1DAC ; mapped ; 0271 # 4.1 MODIFIER LETTER SMALL M WITH HOOK ++1DAD ; mapped ; 0270 # 4.1 MODIFIER LETTER SMALL TURNED M WITH LONG LEG ++1DAE ; mapped ; 0272 # 4.1 MODIFIER LETTER SMALL N WITH LEFT HOOK ++1DAF ; mapped ; 0273 # 4.1 MODIFIER LETTER SMALL N WITH RETROFLEX HOOK ++1DB0 ; mapped ; 0274 # 4.1 MODIFIER LETTER SMALL CAPITAL N ++1DB1 ; mapped ; 0275 # 4.1 MODIFIER LETTER SMALL BARRED O ++1DB2 ; mapped ; 0278 # 4.1 MODIFIER LETTER SMALL PHI ++1DB3 ; mapped ; 0282 # 4.1 MODIFIER LETTER SMALL S WITH HOOK ++1DB4 ; mapped ; 0283 # 4.1 MODIFIER LETTER SMALL ESH ++1DB5 ; mapped ; 01AB # 4.1 MODIFIER LETTER SMALL T WITH PALATAL HOOK ++1DB6 ; mapped ; 0289 # 4.1 MODIFIER LETTER SMALL U BAR ++1DB7 ; mapped ; 028A # 4.1 MODIFIER LETTER SMALL UPSILON ++1DB8 ; mapped ; 1D1C # 4.1 MODIFIER LETTER SMALL CAPITAL U ++1DB9 ; mapped ; 028B # 4.1 MODIFIER LETTER SMALL V WITH HOOK ++1DBA ; mapped ; 028C # 4.1 MODIFIER LETTER SMALL TURNED V ++1DBB 
; mapped ; 007A # 4.1 MODIFIER LETTER SMALL Z ++1DBC ; mapped ; 0290 # 4.1 MODIFIER LETTER SMALL Z WITH RETROFLEX HOOK ++1DBD ; mapped ; 0291 # 4.1 MODIFIER LETTER SMALL Z WITH CURL ++1DBE ; mapped ; 0292 # 4.1 MODIFIER LETTER SMALL EZH ++1DBF ; mapped ; 03B8 # 4.1 MODIFIER LETTER SMALL THETA ++1DC0..1DC3 ; valid # 4.1 COMBINING DOTTED GRAVE ACCENT..COMBINING SUSPENSION MARK ++1DC4..1DCA ; valid # 5.0 COMBINING MACRON-ACUTE..COMBINING LATIN SMALL LETTER R BELOW ++1DCB..1DE6 ; valid # 5.1 COMBINING BREVE-MACRON..COMBINING LATIN SMALL LETTER Z ++1DE7..1DF5 ; valid # 7.0 COMBINING LATIN SMALL LETTER ALPHA..COMBINING UP TACK ABOVE ++1DF6..1DF9 ; valid # 10.0 COMBINING KAVYKA ABOVE RIGHT..COMBINING WIDE INVERTED BRIDGE BELOW ++1DFA ; disallowed # NA ++1DFB ; valid # 9.0 COMBINING DELETION MARK ++1DFC ; valid # 6.0 COMBINING DOUBLE INVERTED BREVE BELOW ++1DFD ; valid # 5.2 COMBINING ALMOST EQUAL TO BELOW ++1DFE..1DFF ; valid # 5.0 COMBINING LEFT ARROWHEAD ABOVE..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW ++1E00 ; mapped ; 1E01 # 1.1 LATIN CAPITAL LETTER A WITH RING BELOW ++1E01 ; valid # 1.1 LATIN SMALL LETTER A WITH RING BELOW ++1E02 ; mapped ; 1E03 # 1.1 LATIN CAPITAL LETTER B WITH DOT ABOVE ++1E03 ; valid # 1.1 LATIN SMALL LETTER B WITH DOT ABOVE ++1E04 ; mapped ; 1E05 # 1.1 LATIN CAPITAL LETTER B WITH DOT BELOW ++1E05 ; valid # 1.1 LATIN SMALL LETTER B WITH DOT BELOW ++1E06 ; mapped ; 1E07 # 1.1 LATIN CAPITAL LETTER B WITH LINE BELOW ++1E07 ; valid # 1.1 LATIN SMALL LETTER B WITH LINE BELOW ++1E08 ; mapped ; 1E09 # 1.1 LATIN CAPITAL LETTER C WITH CEDILLA AND ACUTE ++1E09 ; valid # 1.1 LATIN SMALL LETTER C WITH CEDILLA AND ACUTE ++1E0A ; mapped ; 1E0B # 1.1 LATIN CAPITAL LETTER D WITH DOT ABOVE ++1E0B ; valid # 1.1 LATIN SMALL LETTER D WITH DOT ABOVE ++1E0C ; mapped ; 1E0D # 1.1 LATIN CAPITAL LETTER D WITH DOT BELOW ++1E0D ; valid # 1.1 LATIN SMALL LETTER D WITH DOT BELOW ++1E0E ; mapped ; 1E0F # 1.1 LATIN CAPITAL LETTER D WITH LINE BELOW ++1E0F ; valid # 1.1 
LATIN SMALL LETTER D WITH LINE BELOW ++1E10 ; mapped ; 1E11 # 1.1 LATIN CAPITAL LETTER D WITH CEDILLA ++1E11 ; valid # 1.1 LATIN SMALL LETTER D WITH CEDILLA ++1E12 ; mapped ; 1E13 # 1.1 LATIN CAPITAL LETTER D WITH CIRCUMFLEX BELOW ++1E13 ; valid # 1.1 LATIN SMALL LETTER D WITH CIRCUMFLEX BELOW ++1E14 ; mapped ; 1E15 # 1.1 LATIN CAPITAL LETTER E WITH MACRON AND GRAVE ++1E15 ; valid # 1.1 LATIN SMALL LETTER E WITH MACRON AND GRAVE ++1E16 ; mapped ; 1E17 # 1.1 LATIN CAPITAL LETTER E WITH MACRON AND ACUTE ++1E17 ; valid # 1.1 LATIN SMALL LETTER E WITH MACRON AND ACUTE ++1E18 ; mapped ; 1E19 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX BELOW ++1E19 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX BELOW ++1E1A ; mapped ; 1E1B # 1.1 LATIN CAPITAL LETTER E WITH TILDE BELOW ++1E1B ; valid # 1.1 LATIN SMALL LETTER E WITH TILDE BELOW ++1E1C ; mapped ; 1E1D # 1.1 LATIN CAPITAL LETTER E WITH CEDILLA AND BREVE ++1E1D ; valid # 1.1 LATIN SMALL LETTER E WITH CEDILLA AND BREVE ++1E1E ; mapped ; 1E1F # 1.1 LATIN CAPITAL LETTER F WITH DOT ABOVE ++1E1F ; valid # 1.1 LATIN SMALL LETTER F WITH DOT ABOVE ++1E20 ; mapped ; 1E21 # 1.1 LATIN CAPITAL LETTER G WITH MACRON ++1E21 ; valid # 1.1 LATIN SMALL LETTER G WITH MACRON ++1E22 ; mapped ; 1E23 # 1.1 LATIN CAPITAL LETTER H WITH DOT ABOVE ++1E23 ; valid # 1.1 LATIN SMALL LETTER H WITH DOT ABOVE ++1E24 ; mapped ; 1E25 # 1.1 LATIN CAPITAL LETTER H WITH DOT BELOW ++1E25 ; valid # 1.1 LATIN SMALL LETTER H WITH DOT BELOW ++1E26 ; mapped ; 1E27 # 1.1 LATIN CAPITAL LETTER H WITH DIAERESIS ++1E27 ; valid # 1.1 LATIN SMALL LETTER H WITH DIAERESIS ++1E28 ; mapped ; 1E29 # 1.1 LATIN CAPITAL LETTER H WITH CEDILLA ++1E29 ; valid # 1.1 LATIN SMALL LETTER H WITH CEDILLA ++1E2A ; mapped ; 1E2B # 1.1 LATIN CAPITAL LETTER H WITH BREVE BELOW ++1E2B ; valid # 1.1 LATIN SMALL LETTER H WITH BREVE BELOW ++1E2C ; mapped ; 1E2D # 1.1 LATIN CAPITAL LETTER I WITH TILDE BELOW ++1E2D ; valid # 1.1 LATIN SMALL LETTER I WITH TILDE BELOW ++1E2E ; mapped ; 1E2F # 1.1 
LATIN CAPITAL LETTER I WITH DIAERESIS AND ACUTE ++1E2F ; valid # 1.1 LATIN SMALL LETTER I WITH DIAERESIS AND ACUTE ++1E30 ; mapped ; 1E31 # 1.1 LATIN CAPITAL LETTER K WITH ACUTE ++1E31 ; valid # 1.1 LATIN SMALL LETTER K WITH ACUTE ++1E32 ; mapped ; 1E33 # 1.1 LATIN CAPITAL LETTER K WITH DOT BELOW ++1E33 ; valid # 1.1 LATIN SMALL LETTER K WITH DOT BELOW ++1E34 ; mapped ; 1E35 # 1.1 LATIN CAPITAL LETTER K WITH LINE BELOW ++1E35 ; valid # 1.1 LATIN SMALL LETTER K WITH LINE BELOW ++1E36 ; mapped ; 1E37 # 1.1 LATIN CAPITAL LETTER L WITH DOT BELOW ++1E37 ; valid # 1.1 LATIN SMALL LETTER L WITH DOT BELOW ++1E38 ; mapped ; 1E39 # 1.1 LATIN CAPITAL LETTER L WITH DOT BELOW AND MACRON ++1E39 ; valid # 1.1 LATIN SMALL LETTER L WITH DOT BELOW AND MACRON ++1E3A ; mapped ; 1E3B # 1.1 LATIN CAPITAL LETTER L WITH LINE BELOW ++1E3B ; valid # 1.1 LATIN SMALL LETTER L WITH LINE BELOW ++1E3C ; mapped ; 1E3D # 1.1 LATIN CAPITAL LETTER L WITH CIRCUMFLEX BELOW ++1E3D ; valid # 1.1 LATIN SMALL LETTER L WITH CIRCUMFLEX BELOW ++1E3E ; mapped ; 1E3F # 1.1 LATIN CAPITAL LETTER M WITH ACUTE ++1E3F ; valid # 1.1 LATIN SMALL LETTER M WITH ACUTE ++1E40 ; mapped ; 1E41 # 1.1 LATIN CAPITAL LETTER M WITH DOT ABOVE ++1E41 ; valid # 1.1 LATIN SMALL LETTER M WITH DOT ABOVE ++1E42 ; mapped ; 1E43 # 1.1 LATIN CAPITAL LETTER M WITH DOT BELOW ++1E43 ; valid # 1.1 LATIN SMALL LETTER M WITH DOT BELOW ++1E44 ; mapped ; 1E45 # 1.1 LATIN CAPITAL LETTER N WITH DOT ABOVE ++1E45 ; valid # 1.1 LATIN SMALL LETTER N WITH DOT ABOVE ++1E46 ; mapped ; 1E47 # 1.1 LATIN CAPITAL LETTER N WITH DOT BELOW ++1E47 ; valid # 1.1 LATIN SMALL LETTER N WITH DOT BELOW ++1E48 ; mapped ; 1E49 # 1.1 LATIN CAPITAL LETTER N WITH LINE BELOW ++1E49 ; valid # 1.1 LATIN SMALL LETTER N WITH LINE BELOW ++1E4A ; mapped ; 1E4B # 1.1 LATIN CAPITAL LETTER N WITH CIRCUMFLEX BELOW ++1E4B ; valid # 1.1 LATIN SMALL LETTER N WITH CIRCUMFLEX BELOW ++1E4C ; mapped ; 1E4D # 1.1 LATIN CAPITAL LETTER O WITH TILDE AND ACUTE ++1E4D ; valid # 1.1 LATIN SMALL 
LETTER O WITH TILDE AND ACUTE ++1E4E ; mapped ; 1E4F # 1.1 LATIN CAPITAL LETTER O WITH TILDE AND DIAERESIS ++1E4F ; valid # 1.1 LATIN SMALL LETTER O WITH TILDE AND DIAERESIS ++1E50 ; mapped ; 1E51 # 1.1 LATIN CAPITAL LETTER O WITH MACRON AND GRAVE ++1E51 ; valid # 1.1 LATIN SMALL LETTER O WITH MACRON AND GRAVE ++1E52 ; mapped ; 1E53 # 1.1 LATIN CAPITAL LETTER O WITH MACRON AND ACUTE ++1E53 ; valid # 1.1 LATIN SMALL LETTER O WITH MACRON AND ACUTE ++1E54 ; mapped ; 1E55 # 1.1 LATIN CAPITAL LETTER P WITH ACUTE ++1E55 ; valid # 1.1 LATIN SMALL LETTER P WITH ACUTE ++1E56 ; mapped ; 1E57 # 1.1 LATIN CAPITAL LETTER P WITH DOT ABOVE ++1E57 ; valid # 1.1 LATIN SMALL LETTER P WITH DOT ABOVE ++1E58 ; mapped ; 1E59 # 1.1 LATIN CAPITAL LETTER R WITH DOT ABOVE ++1E59 ; valid # 1.1 LATIN SMALL LETTER R WITH DOT ABOVE ++1E5A ; mapped ; 1E5B # 1.1 LATIN CAPITAL LETTER R WITH DOT BELOW ++1E5B ; valid # 1.1 LATIN SMALL LETTER R WITH DOT BELOW ++1E5C ; mapped ; 1E5D # 1.1 LATIN CAPITAL LETTER R WITH DOT BELOW AND MACRON ++1E5D ; valid # 1.1 LATIN SMALL LETTER R WITH DOT BELOW AND MACRON ++1E5E ; mapped ; 1E5F # 1.1 LATIN CAPITAL LETTER R WITH LINE BELOW ++1E5F ; valid # 1.1 LATIN SMALL LETTER R WITH LINE BELOW ++1E60 ; mapped ; 1E61 # 1.1 LATIN CAPITAL LETTER S WITH DOT ABOVE ++1E61 ; valid # 1.1 LATIN SMALL LETTER S WITH DOT ABOVE ++1E62 ; mapped ; 1E63 # 1.1 LATIN CAPITAL LETTER S WITH DOT BELOW ++1E63 ; valid # 1.1 LATIN SMALL LETTER S WITH DOT BELOW ++1E64 ; mapped ; 1E65 # 1.1 LATIN CAPITAL LETTER S WITH ACUTE AND DOT ABOVE ++1E65 ; valid # 1.1 LATIN SMALL LETTER S WITH ACUTE AND DOT ABOVE ++1E66 ; mapped ; 1E67 # 1.1 LATIN CAPITAL LETTER S WITH CARON AND DOT ABOVE ++1E67 ; valid # 1.1 LATIN SMALL LETTER S WITH CARON AND DOT ABOVE ++1E68 ; mapped ; 1E69 # 1.1 LATIN CAPITAL LETTER S WITH DOT BELOW AND DOT ABOVE ++1E69 ; valid # 1.1 LATIN SMALL LETTER S WITH DOT BELOW AND DOT ABOVE ++1E6A ; mapped ; 1E6B # 1.1 LATIN CAPITAL LETTER T WITH DOT ABOVE ++1E6B ; valid # 1.1 LATIN SMALL 
LETTER T WITH DOT ABOVE ++1E6C ; mapped ; 1E6D # 1.1 LATIN CAPITAL LETTER T WITH DOT BELOW ++1E6D ; valid # 1.1 LATIN SMALL LETTER T WITH DOT BELOW ++1E6E ; mapped ; 1E6F # 1.1 LATIN CAPITAL LETTER T WITH LINE BELOW ++1E6F ; valid # 1.1 LATIN SMALL LETTER T WITH LINE BELOW ++1E70 ; mapped ; 1E71 # 1.1 LATIN CAPITAL LETTER T WITH CIRCUMFLEX BELOW ++1E71 ; valid # 1.1 LATIN SMALL LETTER T WITH CIRCUMFLEX BELOW ++1E72 ; mapped ; 1E73 # 1.1 LATIN CAPITAL LETTER U WITH DIAERESIS BELOW ++1E73 ; valid # 1.1 LATIN SMALL LETTER U WITH DIAERESIS BELOW ++1E74 ; mapped ; 1E75 # 1.1 LATIN CAPITAL LETTER U WITH TILDE BELOW ++1E75 ; valid # 1.1 LATIN SMALL LETTER U WITH TILDE BELOW ++1E76 ; mapped ; 1E77 # 1.1 LATIN CAPITAL LETTER U WITH CIRCUMFLEX BELOW ++1E77 ; valid # 1.1 LATIN SMALL LETTER U WITH CIRCUMFLEX BELOW ++1E78 ; mapped ; 1E79 # 1.1 LATIN CAPITAL LETTER U WITH TILDE AND ACUTE ++1E79 ; valid # 1.1 LATIN SMALL LETTER U WITH TILDE AND ACUTE ++1E7A ; mapped ; 1E7B # 1.1 LATIN CAPITAL LETTER U WITH MACRON AND DIAERESIS ++1E7B ; valid # 1.1 LATIN SMALL LETTER U WITH MACRON AND DIAERESIS ++1E7C ; mapped ; 1E7D # 1.1 LATIN CAPITAL LETTER V WITH TILDE ++1E7D ; valid # 1.1 LATIN SMALL LETTER V WITH TILDE ++1E7E ; mapped ; 1E7F # 1.1 LATIN CAPITAL LETTER V WITH DOT BELOW ++1E7F ; valid # 1.1 LATIN SMALL LETTER V WITH DOT BELOW ++1E80 ; mapped ; 1E81 # 1.1 LATIN CAPITAL LETTER W WITH GRAVE ++1E81 ; valid # 1.1 LATIN SMALL LETTER W WITH GRAVE ++1E82 ; mapped ; 1E83 # 1.1 LATIN CAPITAL LETTER W WITH ACUTE ++1E83 ; valid # 1.1 LATIN SMALL LETTER W WITH ACUTE ++1E84 ; mapped ; 1E85 # 1.1 LATIN CAPITAL LETTER W WITH DIAERESIS ++1E85 ; valid # 1.1 LATIN SMALL LETTER W WITH DIAERESIS ++1E86 ; mapped ; 1E87 # 1.1 LATIN CAPITAL LETTER W WITH DOT ABOVE ++1E87 ; valid # 1.1 LATIN SMALL LETTER W WITH DOT ABOVE ++1E88 ; mapped ; 1E89 # 1.1 LATIN CAPITAL LETTER W WITH DOT BELOW ++1E89 ; valid # 1.1 LATIN SMALL LETTER W WITH DOT BELOW ++1E8A ; mapped ; 1E8B # 1.1 LATIN CAPITAL LETTER X WITH 
DOT ABOVE ++1E8B ; valid # 1.1 LATIN SMALL LETTER X WITH DOT ABOVE ++1E8C ; mapped ; 1E8D # 1.1 LATIN CAPITAL LETTER X WITH DIAERESIS ++1E8D ; valid # 1.1 LATIN SMALL LETTER X WITH DIAERESIS ++1E8E ; mapped ; 1E8F # 1.1 LATIN CAPITAL LETTER Y WITH DOT ABOVE ++1E8F ; valid # 1.1 LATIN SMALL LETTER Y WITH DOT ABOVE ++1E90 ; mapped ; 1E91 # 1.1 LATIN CAPITAL LETTER Z WITH CIRCUMFLEX ++1E91 ; valid # 1.1 LATIN SMALL LETTER Z WITH CIRCUMFLEX ++1E92 ; mapped ; 1E93 # 1.1 LATIN CAPITAL LETTER Z WITH DOT BELOW ++1E93 ; valid # 1.1 LATIN SMALL LETTER Z WITH DOT BELOW ++1E94 ; mapped ; 1E95 # 1.1 LATIN CAPITAL LETTER Z WITH LINE BELOW ++1E95..1E99 ; valid # 1.1 LATIN SMALL LETTER Z WITH LINE BELOW..LATIN SMALL LETTER Y WITH RING ABOVE ++1E9A ; mapped ; 0061 02BE # 1.1 LATIN SMALL LETTER A WITH RIGHT HALF RING ++1E9B ; mapped ; 1E61 # 2.0 LATIN SMALL LETTER LONG S WITH DOT ABOVE ++1E9C..1E9D ; valid # 5.1 LATIN SMALL LETTER LONG S WITH DIAGONAL STROKE..LATIN SMALL LETTER LONG S WITH HIGH STROKE ++1E9E ; mapped ; 0073 0073 # 5.1 LATIN CAPITAL LETTER SHARP S ++1E9F ; valid # 5.1 LATIN SMALL LETTER DELTA ++1EA0 ; mapped ; 1EA1 # 1.1 LATIN CAPITAL LETTER A WITH DOT BELOW ++1EA1 ; valid # 1.1 LATIN SMALL LETTER A WITH DOT BELOW ++1EA2 ; mapped ; 1EA3 # 1.1 LATIN CAPITAL LETTER A WITH HOOK ABOVE ++1EA3 ; valid # 1.1 LATIN SMALL LETTER A WITH HOOK ABOVE ++1EA4 ; mapped ; 1EA5 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND ACUTE ++1EA5 ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND ACUTE ++1EA6 ; mapped ; 1EA7 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND GRAVE ++1EA7 ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND GRAVE ++1EA8 ; mapped ; 1EA9 # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE ++1EA9 ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND HOOK ABOVE ++1EAA ; mapped ; 1EAB # 1.1 LATIN CAPITAL LETTER A WITH CIRCUMFLEX AND TILDE ++1EAB ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND TILDE ++1EAC ; mapped ; 1EAD # 1.1 LATIN CAPITAL 
LETTER A WITH CIRCUMFLEX AND DOT BELOW ++1EAD ; valid # 1.1 LATIN SMALL LETTER A WITH CIRCUMFLEX AND DOT BELOW ++1EAE ; mapped ; 1EAF # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND ACUTE ++1EAF ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND ACUTE ++1EB0 ; mapped ; 1EB1 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND GRAVE ++1EB1 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND GRAVE ++1EB2 ; mapped ; 1EB3 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND HOOK ABOVE ++1EB3 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND HOOK ABOVE ++1EB4 ; mapped ; 1EB5 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND TILDE ++1EB5 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND TILDE ++1EB6 ; mapped ; 1EB7 # 1.1 LATIN CAPITAL LETTER A WITH BREVE AND DOT BELOW ++1EB7 ; valid # 1.1 LATIN SMALL LETTER A WITH BREVE AND DOT BELOW ++1EB8 ; mapped ; 1EB9 # 1.1 LATIN CAPITAL LETTER E WITH DOT BELOW ++1EB9 ; valid # 1.1 LATIN SMALL LETTER E WITH DOT BELOW ++1EBA ; mapped ; 1EBB # 1.1 LATIN CAPITAL LETTER E WITH HOOK ABOVE ++1EBB ; valid # 1.1 LATIN SMALL LETTER E WITH HOOK ABOVE ++1EBC ; mapped ; 1EBD # 1.1 LATIN CAPITAL LETTER E WITH TILDE ++1EBD ; valid # 1.1 LATIN SMALL LETTER E WITH TILDE ++1EBE ; mapped ; 1EBF # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND ACUTE ++1EBF ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND ACUTE ++1EC0 ; mapped ; 1EC1 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND GRAVE ++1EC1 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND GRAVE ++1EC2 ; mapped ; 1EC3 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE ++1EC3 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND HOOK ABOVE ++1EC4 ; mapped ; 1EC5 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND TILDE ++1EC5 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND TILDE ++1EC6 ; mapped ; 1EC7 # 1.1 LATIN CAPITAL LETTER E WITH CIRCUMFLEX AND DOT BELOW ++1EC7 ; valid # 1.1 LATIN SMALL LETTER E WITH CIRCUMFLEX AND DOT BELOW ++1EC8 ; mapped ; 1EC9 # 1.1 LATIN CAPITAL LETTER I WITH HOOK ABOVE ++1EC9 ; 
valid # 1.1 LATIN SMALL LETTER I WITH HOOK ABOVE ++1ECA ; mapped ; 1ECB # 1.1 LATIN CAPITAL LETTER I WITH DOT BELOW ++1ECB ; valid # 1.1 LATIN SMALL LETTER I WITH DOT BELOW ++1ECC ; mapped ; 1ECD # 1.1 LATIN CAPITAL LETTER O WITH DOT BELOW ++1ECD ; valid # 1.1 LATIN SMALL LETTER O WITH DOT BELOW ++1ECE ; mapped ; 1ECF # 1.1 LATIN CAPITAL LETTER O WITH HOOK ABOVE ++1ECF ; valid # 1.1 LATIN SMALL LETTER O WITH HOOK ABOVE ++1ED0 ; mapped ; 1ED1 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND ACUTE ++1ED1 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND ACUTE ++1ED2 ; mapped ; 1ED3 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND GRAVE ++1ED3 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND GRAVE ++1ED4 ; mapped ; 1ED5 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE ++1ED5 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND HOOK ABOVE ++1ED6 ; mapped ; 1ED7 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND TILDE ++1ED7 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND TILDE ++1ED8 ; mapped ; 1ED9 # 1.1 LATIN CAPITAL LETTER O WITH CIRCUMFLEX AND DOT BELOW ++1ED9 ; valid # 1.1 LATIN SMALL LETTER O WITH CIRCUMFLEX AND DOT BELOW ++1EDA ; mapped ; 1EDB # 1.1 LATIN CAPITAL LETTER O WITH HORN AND ACUTE ++1EDB ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND ACUTE ++1EDC ; mapped ; 1EDD # 1.1 LATIN CAPITAL LETTER O WITH HORN AND GRAVE ++1EDD ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND GRAVE ++1EDE ; mapped ; 1EDF # 1.1 LATIN CAPITAL LETTER O WITH HORN AND HOOK ABOVE ++1EDF ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND HOOK ABOVE ++1EE0 ; mapped ; 1EE1 # 1.1 LATIN CAPITAL LETTER O WITH HORN AND TILDE ++1EE1 ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND TILDE ++1EE2 ; mapped ; 1EE3 # 1.1 LATIN CAPITAL LETTER O WITH HORN AND DOT BELOW ++1EE3 ; valid # 1.1 LATIN SMALL LETTER O WITH HORN AND DOT BELOW ++1EE4 ; mapped ; 1EE5 # 1.1 LATIN CAPITAL LETTER U WITH DOT BELOW ++1EE5 ; valid # 1.1 LATIN SMALL LETTER U WITH DOT BELOW ++1EE6 ; mapped 
; 1EE7 # 1.1 LATIN CAPITAL LETTER U WITH HOOK ABOVE ++1EE7 ; valid # 1.1 LATIN SMALL LETTER U WITH HOOK ABOVE ++1EE8 ; mapped ; 1EE9 # 1.1 LATIN CAPITAL LETTER U WITH HORN AND ACUTE ++1EE9 ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND ACUTE ++1EEA ; mapped ; 1EEB # 1.1 LATIN CAPITAL LETTER U WITH HORN AND GRAVE ++1EEB ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND GRAVE ++1EEC ; mapped ; 1EED # 1.1 LATIN CAPITAL LETTER U WITH HORN AND HOOK ABOVE ++1EED ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND HOOK ABOVE ++1EEE ; mapped ; 1EEF # 1.1 LATIN CAPITAL LETTER U WITH HORN AND TILDE ++1EEF ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND TILDE ++1EF0 ; mapped ; 1EF1 # 1.1 LATIN CAPITAL LETTER U WITH HORN AND DOT BELOW ++1EF1 ; valid # 1.1 LATIN SMALL LETTER U WITH HORN AND DOT BELOW ++1EF2 ; mapped ; 1EF3 # 1.1 LATIN CAPITAL LETTER Y WITH GRAVE ++1EF3 ; valid # 1.1 LATIN SMALL LETTER Y WITH GRAVE ++1EF4 ; mapped ; 1EF5 # 1.1 LATIN CAPITAL LETTER Y WITH DOT BELOW ++1EF5 ; valid # 1.1 LATIN SMALL LETTER Y WITH DOT BELOW ++1EF6 ; mapped ; 1EF7 # 1.1 LATIN CAPITAL LETTER Y WITH HOOK ABOVE ++1EF7 ; valid # 1.1 LATIN SMALL LETTER Y WITH HOOK ABOVE ++1EF8 ; mapped ; 1EF9 # 1.1 LATIN CAPITAL LETTER Y WITH TILDE ++1EF9 ; valid # 1.1 LATIN SMALL LETTER Y WITH TILDE ++1EFA ; mapped ; 1EFB # 5.1 LATIN CAPITAL LETTER MIDDLE-WELSH LL ++1EFB ; valid # 5.1 LATIN SMALL LETTER MIDDLE-WELSH LL ++1EFC ; mapped ; 1EFD # 5.1 LATIN CAPITAL LETTER MIDDLE-WELSH V ++1EFD ; valid # 5.1 LATIN SMALL LETTER MIDDLE-WELSH V ++1EFE ; mapped ; 1EFF # 5.1 LATIN CAPITAL LETTER Y WITH LOOP ++1EFF ; valid # 5.1 LATIN SMALL LETTER Y WITH LOOP ++1F00..1F07 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI..GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI ++1F08 ; mapped ; 1F00 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI ++1F09 ; mapped ; 1F01 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA ++1F0A ; mapped ; 1F02 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA ++1F0B ; mapped ; 1F03 # 1.1 GREEK 
CAPITAL LETTER ALPHA WITH DASIA AND VARIA ++1F0C ; mapped ; 1F04 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA ++1F0D ; mapped ; 1F05 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA ++1F0E ; mapped ; 1F06 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI ++1F0F ; mapped ; 1F07 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI ++1F10..1F15 ; valid # 1.1 GREEK SMALL LETTER EPSILON WITH PSILI..GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA ++1F16..1F17 ; disallowed # NA .. ++1F18 ; mapped ; 1F10 # 1.1 GREEK CAPITAL LETTER EPSILON WITH PSILI ++1F19 ; mapped ; 1F11 # 1.1 GREEK CAPITAL LETTER EPSILON WITH DASIA ++1F1A ; mapped ; 1F12 # 1.1 GREEK CAPITAL LETTER EPSILON WITH PSILI AND VARIA ++1F1B ; mapped ; 1F13 # 1.1 GREEK CAPITAL LETTER EPSILON WITH DASIA AND VARIA ++1F1C ; mapped ; 1F14 # 1.1 GREEK CAPITAL LETTER EPSILON WITH PSILI AND OXIA ++1F1D ; mapped ; 1F15 # 1.1 GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA ++1F1E..1F1F ; disallowed # NA .. ++1F20..1F27 ; valid # 1.1 GREEK SMALL LETTER ETA WITH PSILI..GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI ++1F28 ; mapped ; 1F20 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI ++1F29 ; mapped ; 1F21 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA ++1F2A ; mapped ; 1F22 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA ++1F2B ; mapped ; 1F23 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA ++1F2C ; mapped ; 1F24 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA ++1F2D ; mapped ; 1F25 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA ++1F2E ; mapped ; 1F26 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI ++1F2F ; mapped ; 1F27 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI ++1F30..1F37 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH PSILI..GREEK SMALL LETTER IOTA WITH DASIA AND PERISPOMENI ++1F38 ; mapped ; 1F30 # 1.1 GREEK CAPITAL LETTER IOTA WITH PSILI ++1F39 ; mapped ; 1F31 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA ++1F3A ; mapped ; 1F32 # 1.1 GREEK CAPITAL LETTER IOTA 
WITH PSILI AND VARIA ++1F3B ; mapped ; 1F33 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA AND VARIA ++1F3C ; mapped ; 1F34 # 1.1 GREEK CAPITAL LETTER IOTA WITH PSILI AND OXIA ++1F3D ; mapped ; 1F35 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA AND OXIA ++1F3E ; mapped ; 1F36 # 1.1 GREEK CAPITAL LETTER IOTA WITH PSILI AND PERISPOMENI ++1F3F ; mapped ; 1F37 # 1.1 GREEK CAPITAL LETTER IOTA WITH DASIA AND PERISPOMENI ++1F40..1F45 ; valid # 1.1 GREEK SMALL LETTER OMICRON WITH PSILI..GREEK SMALL LETTER OMICRON WITH DASIA AND OXIA ++1F46..1F47 ; disallowed # NA .. ++1F48 ; mapped ; 1F40 # 1.1 GREEK CAPITAL LETTER OMICRON WITH PSILI ++1F49 ; mapped ; 1F41 # 1.1 GREEK CAPITAL LETTER OMICRON WITH DASIA ++1F4A ; mapped ; 1F42 # 1.1 GREEK CAPITAL LETTER OMICRON WITH PSILI AND VARIA ++1F4B ; mapped ; 1F43 # 1.1 GREEK CAPITAL LETTER OMICRON WITH DASIA AND VARIA ++1F4C ; mapped ; 1F44 # 1.1 GREEK CAPITAL LETTER OMICRON WITH PSILI AND OXIA ++1F4D ; mapped ; 1F45 # 1.1 GREEK CAPITAL LETTER OMICRON WITH DASIA AND OXIA ++1F4E..1F4F ; disallowed # NA .. 
++1F50..1F57 ; valid # 1.1 GREEK SMALL LETTER UPSILON WITH PSILI..GREEK SMALL LETTER UPSILON WITH DASIA AND PERISPOMENI ++1F58 ; disallowed # NA ++1F59 ; mapped ; 1F51 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA ++1F5A ; disallowed # NA ++1F5B ; mapped ; 1F53 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA AND VARIA ++1F5C ; disallowed # NA ++1F5D ; mapped ; 1F55 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA AND OXIA ++1F5E ; disallowed # NA ++1F5F ; mapped ; 1F57 # 1.1 GREEK CAPITAL LETTER UPSILON WITH DASIA AND PERISPOMENI ++1F60..1F67 ; valid # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI..GREEK SMALL LETTER OMEGA WITH DASIA AND PERISPOMENI ++1F68 ; mapped ; 1F60 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI ++1F69 ; mapped ; 1F61 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA ++1F6A ; mapped ; 1F62 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA ++1F6B ; mapped ; 1F63 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA ++1F6C ; mapped ; 1F64 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA ++1F6D ; mapped ; 1F65 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA ++1F6E ; mapped ; 1F66 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI ++1F6F ; mapped ; 1F67 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI ++1F70 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH VARIA ++1F71 ; mapped ; 03AC # 1.1 GREEK SMALL LETTER ALPHA WITH OXIA ++1F72 ; valid # 1.1 GREEK SMALL LETTER EPSILON WITH VARIA ++1F73 ; mapped ; 03AD # 1.1 GREEK SMALL LETTER EPSILON WITH OXIA ++1F74 ; valid # 1.1 GREEK SMALL LETTER ETA WITH VARIA ++1F75 ; mapped ; 03AE # 1.1 GREEK SMALL LETTER ETA WITH OXIA ++1F76 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH VARIA ++1F77 ; mapped ; 03AF # 1.1 GREEK SMALL LETTER IOTA WITH OXIA ++1F78 ; valid # 1.1 GREEK SMALL LETTER OMICRON WITH VARIA ++1F79 ; mapped ; 03CC # 1.1 GREEK SMALL LETTER OMICRON WITH OXIA ++1F7A ; valid # 1.1 GREEK SMALL LETTER UPSILON WITH VARIA ++1F7B ; mapped ; 03CD # 1.1 GREEK SMALL LETTER UPSILON WITH OXIA ++1F7C ; valid 
# 1.1 GREEK SMALL LETTER OMEGA WITH VARIA ++1F7D ; mapped ; 03CE # 1.1 GREEK SMALL LETTER OMEGA WITH OXIA ++1F7E..1F7F ; disallowed # NA .. ++1F80 ; mapped ; 1F00 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND YPOGEGRAMMENI ++1F81 ; mapped ; 1F01 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND YPOGEGRAMMENI ++1F82 ; mapped ; 1F02 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND VARIA AND YPOGEGRAMMENI ++1F83 ; mapped ; 1F03 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND VARIA AND YPOGEGRAMMENI ++1F84 ; mapped ; 1F04 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND OXIA AND YPOGEGRAMMENI ++1F85 ; mapped ; 1F05 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND OXIA AND YPOGEGRAMMENI ++1F86 ; mapped ; 1F06 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI ++1F87 ; mapped ; 1F07 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI ++1F88 ; mapped ; 1F00 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND PROSGEGRAMMENI ++1F89 ; mapped ; 1F01 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND PROSGEGRAMMENI ++1F8A ; mapped ; 1F02 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND VARIA AND PROSGEGRAMMENI ++1F8B ; mapped ; 1F03 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND VARIA AND PROSGEGRAMMENI ++1F8C ; mapped ; 1F04 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND OXIA AND PROSGEGRAMMENI ++1F8D ; mapped ; 1F05 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND OXIA AND PROSGEGRAMMENI ++1F8E ; mapped ; 1F06 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI ++1F8F ; mapped ; 1F07 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI ++1F90 ; mapped ; 1F20 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND YPOGEGRAMMENI ++1F91 ; mapped ; 1F21 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND YPOGEGRAMMENI ++1F92 ; mapped ; 1F22 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND VARIA AND YPOGEGRAMMENI ++1F93 ; mapped ; 
1F23 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND VARIA AND YPOGEGRAMMENI ++1F94 ; mapped ; 1F24 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND OXIA AND YPOGEGRAMMENI ++1F95 ; mapped ; 1F25 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND OXIA AND YPOGEGRAMMENI ++1F96 ; mapped ; 1F26 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI ++1F97 ; mapped ; 1F27 03B9 # 1.1 GREEK SMALL LETTER ETA WITH DASIA AND PERISPOMENI AND YPOGEGRAMMENI ++1F98 ; mapped ; 1F20 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND PROSGEGRAMMENI ++1F99 ; mapped ; 1F21 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND PROSGEGRAMMENI ++1F9A ; mapped ; 1F22 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND VARIA AND PROSGEGRAMMENI ++1F9B ; mapped ; 1F23 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND VARIA AND PROSGEGRAMMENI ++1F9C ; mapped ; 1F24 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND OXIA AND PROSGEGRAMMENI ++1F9D ; mapped ; 1F25 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND OXIA AND PROSGEGRAMMENI ++1F9E ; mapped ; 1F26 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI ++1F9F ; mapped ; 1F27 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI ++1FA0 ; mapped ; 1F60 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND YPOGEGRAMMENI ++1FA1 ; mapped ; 1F61 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND YPOGEGRAMMENI ++1FA2 ; mapped ; 1F62 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND VARIA AND YPOGEGRAMMENI ++1FA3 ; mapped ; 1F63 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND VARIA AND YPOGEGRAMMENI ++1FA4 ; mapped ; 1F64 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND OXIA AND YPOGEGRAMMENI ++1FA5 ; mapped ; 1F65 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND OXIA AND YPOGEGRAMMENI ++1FA6 ; mapped ; 1F66 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PSILI AND PERISPOMENI AND YPOGEGRAMMENI ++1FA7 ; mapped ; 1F67 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH DASIA AND 
PERISPOMENI AND YPOGEGRAMMENI ++1FA8 ; mapped ; 1F60 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND PROSGEGRAMMENI ++1FA9 ; mapped ; 1F61 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND PROSGEGRAMMENI ++1FAA ; mapped ; 1F62 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND VARIA AND PROSGEGRAMMENI ++1FAB ; mapped ; 1F63 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND VARIA AND PROSGEGRAMMENI ++1FAC ; mapped ; 1F64 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND OXIA AND PROSGEGRAMMENI ++1FAD ; mapped ; 1F65 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND OXIA AND PROSGEGRAMMENI ++1FAE ; mapped ; 1F66 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PSILI AND PERISPOMENI AND PROSGEGRAMMENI ++1FAF ; mapped ; 1F67 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH DASIA AND PERISPOMENI AND PROSGEGRAMMENI ++1FB0..1FB1 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH VRACHY..GREEK SMALL LETTER ALPHA WITH MACRON ++1FB2 ; mapped ; 1F70 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH VARIA AND YPOGEGRAMMENI ++1FB3 ; mapped ; 03B1 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH YPOGEGRAMMENI ++1FB4 ; mapped ; 03AC 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH OXIA AND YPOGEGRAMMENI ++1FB5 ; disallowed # NA ++1FB6 ; valid # 1.1 GREEK SMALL LETTER ALPHA WITH PERISPOMENI ++1FB7 ; mapped ; 1FB6 03B9 # 1.1 GREEK SMALL LETTER ALPHA WITH PERISPOMENI AND YPOGEGRAMMENI ++1FB8 ; mapped ; 1FB0 # 1.1 GREEK CAPITAL LETTER ALPHA WITH VRACHY ++1FB9 ; mapped ; 1FB1 # 1.1 GREEK CAPITAL LETTER ALPHA WITH MACRON ++1FBA ; mapped ; 1F70 # 1.1 GREEK CAPITAL LETTER ALPHA WITH VARIA ++1FBB ; mapped ; 03AC # 1.1 GREEK CAPITAL LETTER ALPHA WITH OXIA ++1FBC ; mapped ; 03B1 03B9 # 1.1 GREEK CAPITAL LETTER ALPHA WITH PROSGEGRAMMENI ++1FBD ; disallowed_STD3_mapped ; 0020 0313 # 1.1 GREEK KORONIS ++1FBE ; mapped ; 03B9 # 1.1 GREEK PROSGEGRAMMENI ++1FBF ; disallowed_STD3_mapped ; 0020 0313 # 1.1 GREEK PSILI ++1FC0 ; disallowed_STD3_mapped ; 0020 0342 # 1.1 GREEK PERISPOMENI ++1FC1 ; disallowed_STD3_mapped ; 0020 
0308 0342 #1.1 GREEK DIALYTIKA AND PERISPOMENI ++1FC2 ; mapped ; 1F74 03B9 # 1.1 GREEK SMALL LETTER ETA WITH VARIA AND YPOGEGRAMMENI ++1FC3 ; mapped ; 03B7 03B9 # 1.1 GREEK SMALL LETTER ETA WITH YPOGEGRAMMENI ++1FC4 ; mapped ; 03AE 03B9 # 1.1 GREEK SMALL LETTER ETA WITH OXIA AND YPOGEGRAMMENI ++1FC5 ; disallowed # NA ++1FC6 ; valid # 1.1 GREEK SMALL LETTER ETA WITH PERISPOMENI ++1FC7 ; mapped ; 1FC6 03B9 # 1.1 GREEK SMALL LETTER ETA WITH PERISPOMENI AND YPOGEGRAMMENI ++1FC8 ; mapped ; 1F72 # 1.1 GREEK CAPITAL LETTER EPSILON WITH VARIA ++1FC9 ; mapped ; 03AD # 1.1 GREEK CAPITAL LETTER EPSILON WITH OXIA ++1FCA ; mapped ; 1F74 # 1.1 GREEK CAPITAL LETTER ETA WITH VARIA ++1FCB ; mapped ; 03AE # 1.1 GREEK CAPITAL LETTER ETA WITH OXIA ++1FCC ; mapped ; 03B7 03B9 # 1.1 GREEK CAPITAL LETTER ETA WITH PROSGEGRAMMENI ++1FCD ; disallowed_STD3_mapped ; 0020 0313 0300 #1.1 GREEK PSILI AND VARIA ++1FCE ; disallowed_STD3_mapped ; 0020 0313 0301 #1.1 GREEK PSILI AND OXIA ++1FCF ; disallowed_STD3_mapped ; 0020 0313 0342 #1.1 GREEK PSILI AND PERISPOMENI ++1FD0..1FD2 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH VRACHY..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND VARIA ++1FD3 ; mapped ; 0390 # 1.1 GREEK SMALL LETTER IOTA WITH DIALYTIKA AND OXIA ++1FD4..1FD5 ; disallowed # NA .. 
++1FD6..1FD7 ; valid # 1.1 GREEK SMALL LETTER IOTA WITH PERISPOMENI..GREEK SMALL LETTER IOTA WITH DIALYTIKA AND PERISPOMENI ++1FD8 ; mapped ; 1FD0 # 1.1 GREEK CAPITAL LETTER IOTA WITH VRACHY ++1FD9 ; mapped ; 1FD1 # 1.1 GREEK CAPITAL LETTER IOTA WITH MACRON ++1FDA ; mapped ; 1F76 # 1.1 GREEK CAPITAL LETTER IOTA WITH VARIA ++1FDB ; mapped ; 03AF # 1.1 GREEK CAPITAL LETTER IOTA WITH OXIA ++1FDC ; disallowed # NA ++1FDD ; disallowed_STD3_mapped ; 0020 0314 0300 #1.1 GREEK DASIA AND VARIA ++1FDE ; disallowed_STD3_mapped ; 0020 0314 0301 #1.1 GREEK DASIA AND OXIA ++1FDF ; disallowed_STD3_mapped ; 0020 0314 0342 #1.1 GREEK DASIA AND PERISPOMENI ++1FE0..1FE2 ; valid # 1.1 GREEK SMALL LETTER UPSILON WITH VRACHY..GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND VARIA ++1FE3 ; mapped ; 03B0 # 1.1 GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND OXIA ++1FE4..1FE7 ; valid # 1.1 GREEK SMALL LETTER RHO WITH PSILI..GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND PERISPOMENI ++1FE8 ; mapped ; 1FE0 # 1.1 GREEK CAPITAL LETTER UPSILON WITH VRACHY ++1FE9 ; mapped ; 1FE1 # 1.1 GREEK CAPITAL LETTER UPSILON WITH MACRON ++1FEA ; mapped ; 1F7A # 1.1 GREEK CAPITAL LETTER UPSILON WITH VARIA ++1FEB ; mapped ; 03CD # 1.1 GREEK CAPITAL LETTER UPSILON WITH OXIA ++1FEC ; mapped ; 1FE5 # 1.1 GREEK CAPITAL LETTER RHO WITH DASIA ++1FED ; disallowed_STD3_mapped ; 0020 0308 0300 #1.1 GREEK DIALYTIKA AND VARIA ++1FEE ; disallowed_STD3_mapped ; 0020 0308 0301 #1.1 GREEK DIALYTIKA AND OXIA ++1FEF ; disallowed_STD3_mapped ; 0060 # 1.1 GREEK VARIA ++1FF0..1FF1 ; disallowed # NA .. 
++1FF2 ; mapped ; 1F7C 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH VARIA AND YPOGEGRAMMENI ++1FF3 ; mapped ; 03C9 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH YPOGEGRAMMENI ++1FF4 ; mapped ; 03CE 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH OXIA AND YPOGEGRAMMENI ++1FF5 ; disallowed # NA ++1FF6 ; valid # 1.1 GREEK SMALL LETTER OMEGA WITH PERISPOMENI ++1FF7 ; mapped ; 1FF6 03B9 # 1.1 GREEK SMALL LETTER OMEGA WITH PERISPOMENI AND YPOGEGRAMMENI ++1FF8 ; mapped ; 1F78 # 1.1 GREEK CAPITAL LETTER OMICRON WITH VARIA ++1FF9 ; mapped ; 03CC # 1.1 GREEK CAPITAL LETTER OMICRON WITH OXIA ++1FFA ; mapped ; 1F7C # 1.1 GREEK CAPITAL LETTER OMEGA WITH VARIA ++1FFB ; mapped ; 03CE # 1.1 GREEK CAPITAL LETTER OMEGA WITH OXIA ++1FFC ; mapped ; 03C9 03B9 # 1.1 GREEK CAPITAL LETTER OMEGA WITH PROSGEGRAMMENI ++1FFD ; disallowed_STD3_mapped ; 0020 0301 # 1.1 GREEK OXIA ++1FFE ; disallowed_STD3_mapped ; 0020 0314 # 1.1 GREEK DASIA ++1FFF ; disallowed # NA ++2000..200A ; disallowed_STD3_mapped ; 0020 # 1.1 EN QUAD..HAIR SPACE ++200B ; ignored # 1.1 ZERO WIDTH SPACE ++200C..200D ; deviation ; # 1.1 ZERO WIDTH NON-JOINER..ZERO WIDTH JOINER ++200E..200F ; disallowed # 1.1 LEFT-TO-RIGHT MARK..RIGHT-TO-LEFT MARK ++2010 ; valid ; ; NV8 # 1.1 HYPHEN ++2011 ; mapped ; 2010 # 1.1 NON-BREAKING HYPHEN ++2012..2016 ; valid ; ; NV8 # 1.1 FIGURE DASH..DOUBLE VERTICAL LINE ++2017 ; disallowed_STD3_mapped ; 0020 0333 # 1.1 DOUBLE LOW LINE ++2018..2023 ; valid ; ; NV8 # 1.1 LEFT SINGLE QUOTATION MARK..TRIANGULAR BULLET ++2024..2026 ; disallowed # 1.1 ONE DOT LEADER..HORIZONTAL ELLIPSIS ++2027 ; valid ; ; NV8 # 1.1 HYPHENATION POINT ++2028..202E ; disallowed # 1.1 LINE SEPARATOR..RIGHT-TO-LEFT OVERRIDE ++202F ; disallowed_STD3_mapped ; 0020 # 3.0 NARROW NO-BREAK SPACE ++2030..2032 ; valid ; ; NV8 # 1.1 PER MILLE SIGN..PRIME ++2033 ; mapped ; 2032 2032 # 1.1 DOUBLE PRIME ++2034 ; mapped ; 2032 2032 2032 #1.1 TRIPLE PRIME ++2035 ; valid ; ; NV8 # 1.1 REVERSED PRIME ++2036 ; mapped ; 2035 2035 # 1.1 REVERSED DOUBLE PRIME 
++2037 ; mapped ; 2035 2035 2035 #1.1 REVERSED TRIPLE PRIME ++2038..203B ; valid ; ; NV8 # 1.1 CARET..REFERENCE MARK ++203C ; disallowed_STD3_mapped ; 0021 0021 # 1.1 DOUBLE EXCLAMATION MARK ++203D ; valid ; ; NV8 # 1.1 INTERROBANG ++203E ; disallowed_STD3_mapped ; 0020 0305 # 1.1 OVERLINE ++203F..2046 ; valid ; ; NV8 # 1.1 UNDERTIE..RIGHT SQUARE BRACKET WITH QUILL ++2047 ; disallowed_STD3_mapped ; 003F 003F # 3.2 DOUBLE QUESTION MARK ++2048 ; disallowed_STD3_mapped ; 003F 0021 # 3.0 QUESTION EXCLAMATION MARK ++2049 ; disallowed_STD3_mapped ; 0021 003F # 3.0 EXCLAMATION QUESTION MARK ++204A..204D ; valid ; ; NV8 # 3.0 TIRONIAN SIGN ET..BLACK RIGHTWARDS BULLET ++204E..2052 ; valid ; ; NV8 # 3.2 LOW ASTERISK..COMMERCIAL MINUS SIGN ++2053..2054 ; valid ; ; NV8 # 4.0 SWUNG DASH..INVERTED UNDERTIE ++2055..2056 ; valid ; ; NV8 # 4.1 FLOWER PUNCTUATION MARK..THREE DOT PUNCTUATION ++2057 ; mapped ; 2032 2032 2032 2032 #3.2 QUADRUPLE PRIME ++2058..205E ; valid ; ; NV8 # 4.1 FOUR DOT PUNCTUATION..VERTICAL FOUR DOTS ++205F ; disallowed_STD3_mapped ; 0020 # 3.2 MEDIUM MATHEMATICAL SPACE ++2060 ; ignored # 3.2 WORD JOINER ++2061..2063 ; disallowed # 3.2 FUNCTION APPLICATION..INVISIBLE SEPARATOR ++2064 ; ignored # 5.1 INVISIBLE PLUS ++2065 ; disallowed # NA ++2066..2069 ; disallowed # 6.3 LEFT-TO-RIGHT ISOLATE..POP DIRECTIONAL ISOLATE ++206A..206F ; disallowed # 1.1 INHIBIT SYMMETRIC SWAPPING..NOMINAL DIGIT SHAPES ++2070 ; mapped ; 0030 # 1.1 SUPERSCRIPT ZERO ++2071 ; mapped ; 0069 # 3.2 SUPERSCRIPT LATIN SMALL LETTER I ++2072..2073 ; disallowed # NA .. 
++2074 ; mapped ; 0034 # 1.1 SUPERSCRIPT FOUR ++2075 ; mapped ; 0035 # 1.1 SUPERSCRIPT FIVE ++2076 ; mapped ; 0036 # 1.1 SUPERSCRIPT SIX ++2077 ; mapped ; 0037 # 1.1 SUPERSCRIPT SEVEN ++2078 ; mapped ; 0038 # 1.1 SUPERSCRIPT EIGHT ++2079 ; mapped ; 0039 # 1.1 SUPERSCRIPT NINE ++207A ; disallowed_STD3_mapped ; 002B # 1.1 SUPERSCRIPT PLUS SIGN ++207B ; mapped ; 2212 # 1.1 SUPERSCRIPT MINUS ++207C ; disallowed_STD3_mapped ; 003D # 1.1 SUPERSCRIPT EQUALS SIGN ++207D ; disallowed_STD3_mapped ; 0028 # 1.1 SUPERSCRIPT LEFT PARENTHESIS ++207E ; disallowed_STD3_mapped ; 0029 # 1.1 SUPERSCRIPT RIGHT PARENTHESIS ++207F ; mapped ; 006E # 1.1 SUPERSCRIPT LATIN SMALL LETTER N ++2080 ; mapped ; 0030 # 1.1 SUBSCRIPT ZERO ++2081 ; mapped ; 0031 # 1.1 SUBSCRIPT ONE ++2082 ; mapped ; 0032 # 1.1 SUBSCRIPT TWO ++2083 ; mapped ; 0033 # 1.1 SUBSCRIPT THREE ++2084 ; mapped ; 0034 # 1.1 SUBSCRIPT FOUR ++2085 ; mapped ; 0035 # 1.1 SUBSCRIPT FIVE ++2086 ; mapped ; 0036 # 1.1 SUBSCRIPT SIX ++2087 ; mapped ; 0037 # 1.1 SUBSCRIPT SEVEN ++2088 ; mapped ; 0038 # 1.1 SUBSCRIPT EIGHT ++2089 ; mapped ; 0039 # 1.1 SUBSCRIPT NINE ++208A ; disallowed_STD3_mapped ; 002B # 1.1 SUBSCRIPT PLUS SIGN ++208B ; mapped ; 2212 # 1.1 SUBSCRIPT MINUS ++208C ; disallowed_STD3_mapped ; 003D # 1.1 SUBSCRIPT EQUALS SIGN ++208D ; disallowed_STD3_mapped ; 0028 # 1.1 SUBSCRIPT LEFT PARENTHESIS ++208E ; disallowed_STD3_mapped ; 0029 # 1.1 SUBSCRIPT RIGHT PARENTHESIS ++208F ; disallowed # NA ++2090 ; mapped ; 0061 # 4.1 LATIN SUBSCRIPT SMALL LETTER A ++2091 ; mapped ; 0065 # 4.1 LATIN SUBSCRIPT SMALL LETTER E ++2092 ; mapped ; 006F # 4.1 LATIN SUBSCRIPT SMALL LETTER O ++2093 ; mapped ; 0078 # 4.1 LATIN SUBSCRIPT SMALL LETTER X ++2094 ; mapped ; 0259 # 4.1 LATIN SUBSCRIPT SMALL LETTER SCHWA ++2095 ; mapped ; 0068 # 6.0 LATIN SUBSCRIPT SMALL LETTER H ++2096 ; mapped ; 006B # 6.0 LATIN SUBSCRIPT SMALL LETTER K ++2097 ; mapped ; 006C # 6.0 LATIN SUBSCRIPT SMALL LETTER L ++2098 ; mapped ; 006D # 6.0 LATIN SUBSCRIPT SMALL LETTER 
M ++2099 ; mapped ; 006E # 6.0 LATIN SUBSCRIPT SMALL LETTER N ++209A ; mapped ; 0070 # 6.0 LATIN SUBSCRIPT SMALL LETTER P ++209B ; mapped ; 0073 # 6.0 LATIN SUBSCRIPT SMALL LETTER S ++209C ; mapped ; 0074 # 6.0 LATIN SUBSCRIPT SMALL LETTER T ++209D..209F ; disallowed # NA .. ++20A0..20A7 ; valid ; ; NV8 # 1.1 EURO-CURRENCY SIGN..PESETA SIGN ++20A8 ; mapped ; 0072 0073 # 1.1 RUPEE SIGN ++20A9..20AA ; valid ; ; NV8 # 1.1 WON SIGN..NEW SHEQEL SIGN ++20AB ; valid ; ; NV8 # 2.0 DONG SIGN ++20AC ; valid ; ; NV8 # 2.1 EURO SIGN ++20AD..20AF ; valid ; ; NV8 # 3.0 KIP SIGN..DRACHMA SIGN ++20B0..20B1 ; valid ; ; NV8 # 3.2 GERMAN PENNY SIGN..PESO SIGN ++20B2..20B5 ; valid ; ; NV8 # 4.1 GUARANI SIGN..CEDI SIGN ++20B6..20B8 ; valid ; ; NV8 # 5.2 LIVRE TOURNOIS SIGN..TENGE SIGN ++20B9 ; valid ; ; NV8 # 6.0 INDIAN RUPEE SIGN ++20BA ; valid ; ; NV8 # 6.2 TURKISH LIRA SIGN ++20BB..20BD ; valid ; ; NV8 # 7.0 NORDIC MARK SIGN..RUBLE SIGN ++20BE ; valid ; ; NV8 # 8.0 LARI SIGN ++20BF ; valid ; ; NV8 # 10.0 BITCOIN SIGN ++20C0..20CF ; disallowed # NA .. ++20D0..20E1 ; valid ; ; NV8 # 1.1 COMBINING LEFT HARPOON ABOVE..COMBINING LEFT RIGHT ARROW ABOVE ++20E2..20E3 ; valid ; ; NV8 # 3.0 COMBINING ENCLOSING SCREEN..COMBINING ENCLOSING KEYCAP ++20E4..20EA ; valid ; ; NV8 # 3.2 COMBINING ENCLOSING UPWARD POINTING TRIANGLE..COMBINING LEFTWARDS ARROW OVERLAY ++20EB ; valid ; ; NV8 # 4.1 COMBINING LONG DOUBLE SOLIDUS OVERLAY ++20EC..20EF ; valid ; ; NV8 # 5.0 COMBINING RIGHTWARDS HARPOON WITH BARB DOWNWARDS..COMBINING RIGHT ARROW BELOW ++20F0 ; valid ; ; NV8 # 5.1 COMBINING ASTERISK ABOVE ++20F1..20FF ; disallowed # NA .. 
++2100 ; disallowed_STD3_mapped ; 0061 002F 0063 #1.1 ACCOUNT OF ++2101 ; disallowed_STD3_mapped ; 0061 002F 0073 #1.1 ADDRESSED TO THE SUBJECT ++2102 ; mapped ; 0063 # 1.1 DOUBLE-STRUCK CAPITAL C ++2103 ; mapped ; 00B0 0063 # 1.1 DEGREE CELSIUS ++2104 ; valid ; ; NV8 # 1.1 CENTRE LINE SYMBOL ++2105 ; disallowed_STD3_mapped ; 0063 002F 006F #1.1 CARE OF ++2106 ; disallowed_STD3_mapped ; 0063 002F 0075 #1.1 CADA UNA ++2107 ; mapped ; 025B # 1.1 EULER CONSTANT ++2108 ; valid ; ; NV8 # 1.1 SCRUPLE ++2109 ; mapped ; 00B0 0066 # 1.1 DEGREE FAHRENHEIT ++210A ; mapped ; 0067 # 1.1 SCRIPT SMALL G ++210B..210E ; mapped ; 0068 # 1.1 SCRIPT CAPITAL H..PLANCK CONSTANT ++210F ; mapped ; 0127 # 1.1 PLANCK CONSTANT OVER TWO PI ++2110..2111 ; mapped ; 0069 # 1.1 SCRIPT CAPITAL I..BLACK-LETTER CAPITAL I ++2112..2113 ; mapped ; 006C # 1.1 SCRIPT CAPITAL L..SCRIPT SMALL L ++2114 ; valid ; ; NV8 # 1.1 L B BAR SYMBOL ++2115 ; mapped ; 006E # 1.1 DOUBLE-STRUCK CAPITAL N ++2116 ; mapped ; 006E 006F # 1.1 NUMERO SIGN ++2117..2118 ; valid ; ; NV8 # 1.1 SOUND RECORDING COPYRIGHT..SCRIPT CAPITAL P ++2119 ; mapped ; 0070 # 1.1 DOUBLE-STRUCK CAPITAL P ++211A ; mapped ; 0071 # 1.1 DOUBLE-STRUCK CAPITAL Q ++211B..211D ; mapped ; 0072 # 1.1 SCRIPT CAPITAL R..DOUBLE-STRUCK CAPITAL R ++211E..211F ; valid ; ; NV8 # 1.1 PRESCRIPTION TAKE..RESPONSE ++2120 ; mapped ; 0073 006D # 1.1 SERVICE MARK ++2121 ; mapped ; 0074 0065 006C #1.1 TELEPHONE SIGN ++2122 ; mapped ; 0074 006D # 1.1 TRADE MARK SIGN ++2123 ; valid ; ; NV8 # 1.1 VERSICLE ++2124 ; mapped ; 007A # 1.1 DOUBLE-STRUCK CAPITAL Z ++2125 ; valid ; ; NV8 # 1.1 OUNCE SIGN ++2126 ; mapped ; 03C9 # 1.1 OHM SIGN ++2127 ; valid ; ; NV8 # 1.1 INVERTED OHM SIGN ++2128 ; mapped ; 007A # 1.1 BLACK-LETTER CAPITAL Z ++2129 ; valid ; ; NV8 # 1.1 TURNED GREEK SMALL LETTER IOTA ++212A ; mapped ; 006B # 1.1 KELVIN SIGN ++212B ; mapped ; 00E5 # 1.1 ANGSTROM SIGN ++212C ; mapped ; 0062 # 1.1 SCRIPT CAPITAL B ++212D ; mapped ; 0063 # 1.1 BLACK-LETTER CAPITAL C 
++212E ; valid ; ; NV8 # 1.1 ESTIMATED SYMBOL ++212F..2130 ; mapped ; 0065 # 1.1 SCRIPT SMALL E..SCRIPT CAPITAL E ++2131 ; mapped ; 0066 # 1.1 SCRIPT CAPITAL F ++2132 ; disallowed # 1.1 TURNED CAPITAL F ++2133 ; mapped ; 006D # 1.1 SCRIPT CAPITAL M ++2134 ; mapped ; 006F # 1.1 SCRIPT SMALL O ++2135 ; mapped ; 05D0 # 1.1 ALEF SYMBOL ++2136 ; mapped ; 05D1 # 1.1 BET SYMBOL ++2137 ; mapped ; 05D2 # 1.1 GIMEL SYMBOL ++2138 ; mapped ; 05D3 # 1.1 DALET SYMBOL ++2139 ; mapped ; 0069 # 3.0 INFORMATION SOURCE ++213A ; valid ; ; NV8 # 3.0 ROTATED CAPITAL Q ++213B ; mapped ; 0066 0061 0078 #4.0 FACSIMILE SIGN ++213C ; mapped ; 03C0 # 4.1 DOUBLE-STRUCK SMALL PI ++213D..213E ; mapped ; 03B3 # 3.2 DOUBLE-STRUCK SMALL GAMMA..DOUBLE-STRUCK CAPITAL GAMMA ++213F ; mapped ; 03C0 # 3.2 DOUBLE-STRUCK CAPITAL PI ++2140 ; mapped ; 2211 # 3.2 DOUBLE-STRUCK N-ARY SUMMATION ++2141..2144 ; valid ; ; NV8 # 3.2 TURNED SANS-SERIF CAPITAL G..TURNED SANS-SERIF CAPITAL Y ++2145..2146 ; mapped ; 0064 # 3.2 DOUBLE-STRUCK ITALIC CAPITAL D..DOUBLE-STRUCK ITALIC SMALL D ++2147 ; mapped ; 0065 # 3.2 DOUBLE-STRUCK ITALIC SMALL E ++2148 ; mapped ; 0069 # 3.2 DOUBLE-STRUCK ITALIC SMALL I ++2149 ; mapped ; 006A # 3.2 DOUBLE-STRUCK ITALIC SMALL J ++214A..214B ; valid ; ; NV8 # 3.2 PROPERTY LINE..TURNED AMPERSAND ++214C ; valid ; ; NV8 # 4.1 PER SIGN ++214D ; valid ; ; NV8 # 5.0 AKTIESELSKAB ++214E ; valid # 5.0 TURNED SMALL F ++214F ; valid ; ; NV8 # 5.1 SYMBOL FOR SAMARITAN SOURCE ++2150 ; mapped ; 0031 2044 0037 #5.2 VULGAR FRACTION ONE SEVENTH ++2151 ; mapped ; 0031 2044 0039 #5.2 VULGAR FRACTION ONE NINTH ++2152 ; mapped ; 0031 2044 0031 0030 #5.2 VULGAR FRACTION ONE TENTH ++2153 ; mapped ; 0031 2044 0033 #1.1 VULGAR FRACTION ONE THIRD ++2154 ; mapped ; 0032 2044 0033 #1.1 VULGAR FRACTION TWO THIRDS ++2155 ; mapped ; 0031 2044 0035 #1.1 VULGAR FRACTION ONE FIFTH ++2156 ; mapped ; 0032 2044 0035 #1.1 VULGAR FRACTION TWO FIFTHS ++2157 ; mapped ; 0033 2044 0035 #1.1 VULGAR FRACTION THREE FIFTHS ++2158 ; 
mapped ; 0034 2044 0035 #1.1 VULGAR FRACTION FOUR FIFTHS ++2159 ; mapped ; 0031 2044 0036 #1.1 VULGAR FRACTION ONE SIXTH ++215A ; mapped ; 0035 2044 0036 #1.1 VULGAR FRACTION FIVE SIXTHS ++215B ; mapped ; 0031 2044 0038 #1.1 VULGAR FRACTION ONE EIGHTH ++215C ; mapped ; 0033 2044 0038 #1.1 VULGAR FRACTION THREE EIGHTHS ++215D ; mapped ; 0035 2044 0038 #1.1 VULGAR FRACTION FIVE EIGHTHS ++215E ; mapped ; 0037 2044 0038 #1.1 VULGAR FRACTION SEVEN EIGHTHS ++215F ; mapped ; 0031 2044 # 1.1 FRACTION NUMERATOR ONE ++2160 ; mapped ; 0069 # 1.1 ROMAN NUMERAL ONE ++2161 ; mapped ; 0069 0069 # 1.1 ROMAN NUMERAL TWO ++2162 ; mapped ; 0069 0069 0069 #1.1 ROMAN NUMERAL THREE ++2163 ; mapped ; 0069 0076 # 1.1 ROMAN NUMERAL FOUR ++2164 ; mapped ; 0076 # 1.1 ROMAN NUMERAL FIVE ++2165 ; mapped ; 0076 0069 # 1.1 ROMAN NUMERAL SIX ++2166 ; mapped ; 0076 0069 0069 #1.1 ROMAN NUMERAL SEVEN ++2167 ; mapped ; 0076 0069 0069 0069 #1.1 ROMAN NUMERAL EIGHT ++2168 ; mapped ; 0069 0078 # 1.1 ROMAN NUMERAL NINE ++2169 ; mapped ; 0078 # 1.1 ROMAN NUMERAL TEN ++216A ; mapped ; 0078 0069 # 1.1 ROMAN NUMERAL ELEVEN ++216B ; mapped ; 0078 0069 0069 #1.1 ROMAN NUMERAL TWELVE ++216C ; mapped ; 006C # 1.1 ROMAN NUMERAL FIFTY ++216D ; mapped ; 0063 # 1.1 ROMAN NUMERAL ONE HUNDRED ++216E ; mapped ; 0064 # 1.1 ROMAN NUMERAL FIVE HUNDRED ++216F ; mapped ; 006D # 1.1 ROMAN NUMERAL ONE THOUSAND ++2170 ; mapped ; 0069 # 1.1 SMALL ROMAN NUMERAL ONE ++2171 ; mapped ; 0069 0069 # 1.1 SMALL ROMAN NUMERAL TWO ++2172 ; mapped ; 0069 0069 0069 #1.1 SMALL ROMAN NUMERAL THREE ++2173 ; mapped ; 0069 0076 # 1.1 SMALL ROMAN NUMERAL FOUR ++2174 ; mapped ; 0076 # 1.1 SMALL ROMAN NUMERAL FIVE ++2175 ; mapped ; 0076 0069 # 1.1 SMALL ROMAN NUMERAL SIX ++2176 ; mapped ; 0076 0069 0069 #1.1 SMALL ROMAN NUMERAL SEVEN ++2177 ; mapped ; 0076 0069 0069 0069 #1.1 SMALL ROMAN NUMERAL EIGHT ++2178 ; mapped ; 0069 0078 # 1.1 SMALL ROMAN NUMERAL NINE ++2179 ; mapped ; 0078 # 1.1 SMALL ROMAN NUMERAL TEN ++217A ; mapped ; 0078 0069 # 1.1 
SMALL ROMAN NUMERAL ELEVEN ++217B ; mapped ; 0078 0069 0069 #1.1 SMALL ROMAN NUMERAL TWELVE ++217C ; mapped ; 006C # 1.1 SMALL ROMAN NUMERAL FIFTY ++217D ; mapped ; 0063 # 1.1 SMALL ROMAN NUMERAL ONE HUNDRED ++217E ; mapped ; 0064 # 1.1 SMALL ROMAN NUMERAL FIVE HUNDRED ++217F ; mapped ; 006D # 1.1 SMALL ROMAN NUMERAL ONE THOUSAND ++2180..2182 ; valid ; ; NV8 # 1.1 ROMAN NUMERAL ONE THOUSAND C D..ROMAN NUMERAL TEN THOUSAND ++2183 ; disallowed # 3.0 ROMAN NUMERAL REVERSED ONE HUNDRED ++2184 ; valid # 5.0 LATIN SMALL LETTER REVERSED C ++2185..2188 ; valid ; ; NV8 # 5.1 ROMAN NUMERAL SIX LATE FORM..ROMAN NUMERAL ONE HUNDRED THOUSAND ++2189 ; mapped ; 0030 2044 0033 #5.2 VULGAR FRACTION ZERO THIRDS ++218A..218B ; valid ; ; NV8 # 8.0 TURNED DIGIT TWO..TURNED DIGIT THREE ++218C..218F ; disallowed # NA .. ++2190..21EA ; valid ; ; NV8 # 1.1 LEFTWARDS ARROW..UPWARDS WHITE ARROW FROM BAR ++21EB..21F3 ; valid ; ; NV8 # 3.0 UPWARDS WHITE ARROW ON PEDESTAL..UP DOWN WHITE ARROW ++21F4..21FF ; valid ; ; NV8 # 3.2 RIGHT ARROW WITH SMALL CIRCLE..LEFT RIGHT OPEN-HEADED ARROW ++2200..222B ; valid ; ; NV8 # 1.1 FOR ALL..INTEGRAL ++222C ; mapped ; 222B 222B # 1.1 DOUBLE INTEGRAL ++222D ; mapped ; 222B 222B 222B #1.1 TRIPLE INTEGRAL ++222E ; valid ; ; NV8 # 1.1 CONTOUR INTEGRAL ++222F ; mapped ; 222E 222E # 1.1 SURFACE INTEGRAL ++2230 ; mapped ; 222E 222E 222E #1.1 VOLUME INTEGRAL ++2231..225F ; valid ; ; NV8 # 1.1 CLOCKWISE INTEGRAL..QUESTIONED EQUAL TO ++2260 ; disallowed_STD3_valid # 1.1 NOT EQUAL TO ++2261..226D ; valid ; ; NV8 # 1.1 IDENTICAL TO..NOT EQUIVALENT TO ++226E..226F ; disallowed_STD3_valid # 1.1 NOT LESS-THAN..NOT GREATER-THAN ++2270..22F1 ; valid ; ; NV8 # 1.1 NEITHER LESS-THAN NOR EQUAL TO..DOWN RIGHT DIAGONAL ELLIPSIS ++22F2..22FF ; valid ; ; NV8 # 3.2 ELEMENT OF WITH LONG HORIZONTAL STROKE..Z NOTATION BAG MEMBERSHIP ++2300 ; valid ; ; NV8 # 1.1 DIAMETER SIGN ++2301 ; valid ; ; NV8 # 3.0 ELECTRIC ARROW ++2302..2328 ; valid ; ; NV8 # 1.1 HOUSE..KEYBOARD ++2329 ; mapped 
; 3008 # 1.1 LEFT-POINTING ANGLE BRACKET ++232A ; mapped ; 3009 # 1.1 RIGHT-POINTING ANGLE BRACKET ++232B..237A ; valid ; ; NV8 # 1.1 ERASE TO THE LEFT..APL FUNCTIONAL SYMBOL ALPHA ++237B ; valid ; ; NV8 # 3.0 NOT CHECK MARK ++237C ; valid ; ; NV8 # 3.2 RIGHT ANGLE WITH DOWNWARDS ZIGZAG ARROW ++237D..239A ; valid ; ; NV8 # 3.0 SHOULDERED OPEN BOX..CLEAR SCREEN SYMBOL ++239B..23CE ; valid ; ; NV8 # 3.2 LEFT PARENTHESIS UPPER HOOK..RETURN SYMBOL ++23CF..23D0 ; valid ; ; NV8 # 4.0 EJECT SYMBOL..VERTICAL LINE EXTENSION ++23D1..23DB ; valid ; ; NV8 # 4.1 METRICAL BREVE..FUSE ++23DC..23E7 ; valid ; ; NV8 # 5.0 TOP PARENTHESIS..ELECTRICAL INTERSECTION ++23E8 ; valid ; ; NV8 # 5.2 DECIMAL EXPONENT SYMBOL ++23E9..23F3 ; valid ; ; NV8 # 6.0 BLACK RIGHT-POINTING DOUBLE TRIANGLE..HOURGLASS WITH FLOWING SAND ++23F4..23FA ; valid ; ; NV8 # 7.0 BLACK MEDIUM LEFT-POINTING TRIANGLE..BLACK CIRCLE FOR RECORD ++23FB..23FE ; valid ; ; NV8 # 9.0 POWER SYMBOL..POWER SLEEP SYMBOL ++23FF ; valid ; ; NV8 # 10.0 OBSERVER EYE SYMBOL ++2400..2424 ; valid ; ; NV8 # 1.1 SYMBOL FOR NULL..SYMBOL FOR NEWLINE ++2425..2426 ; valid ; ; NV8 # 3.0 SYMBOL FOR DELETE FORM TWO..SYMBOL FOR SUBSTITUTE FORM TWO ++2427..243F ; disallowed # NA .. ++2440..244A ; valid ; ; NV8 # 1.1 OCR HOOK..OCR DOUBLE BACKSLASH ++244B..245F ; disallowed # NA .. 
++2460 ; mapped ; 0031 # 1.1 CIRCLED DIGIT ONE ++2461 ; mapped ; 0032 # 1.1 CIRCLED DIGIT TWO ++2462 ; mapped ; 0033 # 1.1 CIRCLED DIGIT THREE ++2463 ; mapped ; 0034 # 1.1 CIRCLED DIGIT FOUR ++2464 ; mapped ; 0035 # 1.1 CIRCLED DIGIT FIVE ++2465 ; mapped ; 0036 # 1.1 CIRCLED DIGIT SIX ++2466 ; mapped ; 0037 # 1.1 CIRCLED DIGIT SEVEN ++2467 ; mapped ; 0038 # 1.1 CIRCLED DIGIT EIGHT ++2468 ; mapped ; 0039 # 1.1 CIRCLED DIGIT NINE ++2469 ; mapped ; 0031 0030 # 1.1 CIRCLED NUMBER TEN ++246A ; mapped ; 0031 0031 # 1.1 CIRCLED NUMBER ELEVEN ++246B ; mapped ; 0031 0032 # 1.1 CIRCLED NUMBER TWELVE ++246C ; mapped ; 0031 0033 # 1.1 CIRCLED NUMBER THIRTEEN ++246D ; mapped ; 0031 0034 # 1.1 CIRCLED NUMBER FOURTEEN ++246E ; mapped ; 0031 0035 # 1.1 CIRCLED NUMBER FIFTEEN ++246F ; mapped ; 0031 0036 # 1.1 CIRCLED NUMBER SIXTEEN ++2470 ; mapped ; 0031 0037 # 1.1 CIRCLED NUMBER SEVENTEEN ++2471 ; mapped ; 0031 0038 # 1.1 CIRCLED NUMBER EIGHTEEN ++2472 ; mapped ; 0031 0039 # 1.1 CIRCLED NUMBER NINETEEN ++2473 ; mapped ; 0032 0030 # 1.1 CIRCLED NUMBER TWENTY ++2474 ; disallowed_STD3_mapped ; 0028 0031 0029 #1.1 PARENTHESIZED DIGIT ONE ++2475 ; disallowed_STD3_mapped ; 0028 0032 0029 #1.1 PARENTHESIZED DIGIT TWO ++2476 ; disallowed_STD3_mapped ; 0028 0033 0029 #1.1 PARENTHESIZED DIGIT THREE ++2477 ; disallowed_STD3_mapped ; 0028 0034 0029 #1.1 PARENTHESIZED DIGIT FOUR ++2478 ; disallowed_STD3_mapped ; 0028 0035 0029 #1.1 PARENTHESIZED DIGIT FIVE ++2479 ; disallowed_STD3_mapped ; 0028 0036 0029 #1.1 PARENTHESIZED DIGIT SIX ++247A ; disallowed_STD3_mapped ; 0028 0037 0029 #1.1 PARENTHESIZED DIGIT SEVEN ++247B ; disallowed_STD3_mapped ; 0028 0038 0029 #1.1 PARENTHESIZED DIGIT EIGHT ++247C ; disallowed_STD3_mapped ; 0028 0039 0029 #1.1 PARENTHESIZED DIGIT NINE ++247D ; disallowed_STD3_mapped ; 0028 0031 0030 0029 #1.1 PARENTHESIZED NUMBER TEN ++247E ; disallowed_STD3_mapped ; 0028 0031 0031 0029 #1.1 PARENTHESIZED NUMBER ELEVEN ++247F ; disallowed_STD3_mapped ; 0028 0031 0032 0029 #1.1 
PARENTHESIZED NUMBER TWELVE ++2480 ; disallowed_STD3_mapped ; 0028 0031 0033 0029 #1.1 PARENTHESIZED NUMBER THIRTEEN ++2481 ; disallowed_STD3_mapped ; 0028 0031 0034 0029 #1.1 PARENTHESIZED NUMBER FOURTEEN ++2482 ; disallowed_STD3_mapped ; 0028 0031 0035 0029 #1.1 PARENTHESIZED NUMBER FIFTEEN ++2483 ; disallowed_STD3_mapped ; 0028 0031 0036 0029 #1.1 PARENTHESIZED NUMBER SIXTEEN ++2484 ; disallowed_STD3_mapped ; 0028 0031 0037 0029 #1.1 PARENTHESIZED NUMBER SEVENTEEN ++2485 ; disallowed_STD3_mapped ; 0028 0031 0038 0029 #1.1 PARENTHESIZED NUMBER EIGHTEEN ++2486 ; disallowed_STD3_mapped ; 0028 0031 0039 0029 #1.1 PARENTHESIZED NUMBER NINETEEN ++2487 ; disallowed_STD3_mapped ; 0028 0032 0030 0029 #1.1 PARENTHESIZED NUMBER TWENTY ++2488..249B ; disallowed # 1.1 DIGIT ONE FULL STOP..NUMBER TWENTY FULL STOP ++249C ; disallowed_STD3_mapped ; 0028 0061 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER A ++249D ; disallowed_STD3_mapped ; 0028 0062 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER B ++249E ; disallowed_STD3_mapped ; 0028 0063 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER C ++249F ; disallowed_STD3_mapped ; 0028 0064 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER D ++24A0 ; disallowed_STD3_mapped ; 0028 0065 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER E ++24A1 ; disallowed_STD3_mapped ; 0028 0066 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER F ++24A2 ; disallowed_STD3_mapped ; 0028 0067 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER G ++24A3 ; disallowed_STD3_mapped ; 0028 0068 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER H ++24A4 ; disallowed_STD3_mapped ; 0028 0069 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER I ++24A5 ; disallowed_STD3_mapped ; 0028 006A 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER J ++24A6 ; disallowed_STD3_mapped ; 0028 006B 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER K ++24A7 ; disallowed_STD3_mapped ; 0028 006C 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER L ++24A8 ; disallowed_STD3_mapped ; 0028 006D 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER M ++24A9 ; 
disallowed_STD3_mapped ; 0028 006E 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER N ++24AA ; disallowed_STD3_mapped ; 0028 006F 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER O ++24AB ; disallowed_STD3_mapped ; 0028 0070 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER P ++24AC ; disallowed_STD3_mapped ; 0028 0071 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER Q ++24AD ; disallowed_STD3_mapped ; 0028 0072 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER R ++24AE ; disallowed_STD3_mapped ; 0028 0073 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER S ++24AF ; disallowed_STD3_mapped ; 0028 0074 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER T ++24B0 ; disallowed_STD3_mapped ; 0028 0075 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER U ++24B1 ; disallowed_STD3_mapped ; 0028 0076 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER V ++24B2 ; disallowed_STD3_mapped ; 0028 0077 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER W ++24B3 ; disallowed_STD3_mapped ; 0028 0078 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER X ++24B4 ; disallowed_STD3_mapped ; 0028 0079 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER Y ++24B5 ; disallowed_STD3_mapped ; 0028 007A 0029 #1.1 PARENTHESIZED LATIN SMALL LETTER Z ++24B6 ; mapped ; 0061 # 1.1 CIRCLED LATIN CAPITAL LETTER A ++24B7 ; mapped ; 0062 # 1.1 CIRCLED LATIN CAPITAL LETTER B ++24B8 ; mapped ; 0063 # 1.1 CIRCLED LATIN CAPITAL LETTER C ++24B9 ; mapped ; 0064 # 1.1 CIRCLED LATIN CAPITAL LETTER D ++24BA ; mapped ; 0065 # 1.1 CIRCLED LATIN CAPITAL LETTER E ++24BB ; mapped ; 0066 # 1.1 CIRCLED LATIN CAPITAL LETTER F ++24BC ; mapped ; 0067 # 1.1 CIRCLED LATIN CAPITAL LETTER G ++24BD ; mapped ; 0068 # 1.1 CIRCLED LATIN CAPITAL LETTER H ++24BE ; mapped ; 0069 # 1.1 CIRCLED LATIN CAPITAL LETTER I ++24BF ; mapped ; 006A # 1.1 CIRCLED LATIN CAPITAL LETTER J ++24C0 ; mapped ; 006B # 1.1 CIRCLED LATIN CAPITAL LETTER K ++24C1 ; mapped ; 006C # 1.1 CIRCLED LATIN CAPITAL LETTER L ++24C2 ; mapped ; 006D # 1.1 CIRCLED LATIN CAPITAL LETTER M ++24C3 ; mapped ; 006E # 1.1 CIRCLED LATIN CAPITAL LETTER N ++24C4 ; 
mapped ; 006F # 1.1 CIRCLED LATIN CAPITAL LETTER O ++24C5 ; mapped ; 0070 # 1.1 CIRCLED LATIN CAPITAL LETTER P ++24C6 ; mapped ; 0071 # 1.1 CIRCLED LATIN CAPITAL LETTER Q ++24C7 ; mapped ; 0072 # 1.1 CIRCLED LATIN CAPITAL LETTER R ++24C8 ; mapped ; 0073 # 1.1 CIRCLED LATIN CAPITAL LETTER S ++24C9 ; mapped ; 0074 # 1.1 CIRCLED LATIN CAPITAL LETTER T ++24CA ; mapped ; 0075 # 1.1 CIRCLED LATIN CAPITAL LETTER U ++24CB ; mapped ; 0076 # 1.1 CIRCLED LATIN CAPITAL LETTER V ++24CC ; mapped ; 0077 # 1.1 CIRCLED LATIN CAPITAL LETTER W ++24CD ; mapped ; 0078 # 1.1 CIRCLED LATIN CAPITAL LETTER X ++24CE ; mapped ; 0079 # 1.1 CIRCLED LATIN CAPITAL LETTER Y ++24CF ; mapped ; 007A # 1.1 CIRCLED LATIN CAPITAL LETTER Z ++24D0 ; mapped ; 0061 # 1.1 CIRCLED LATIN SMALL LETTER A ++24D1 ; mapped ; 0062 # 1.1 CIRCLED LATIN SMALL LETTER B ++24D2 ; mapped ; 0063 # 1.1 CIRCLED LATIN SMALL LETTER C ++24D3 ; mapped ; 0064 # 1.1 CIRCLED LATIN SMALL LETTER D ++24D4 ; mapped ; 0065 # 1.1 CIRCLED LATIN SMALL LETTER E ++24D5 ; mapped ; 0066 # 1.1 CIRCLED LATIN SMALL LETTER F ++24D6 ; mapped ; 0067 # 1.1 CIRCLED LATIN SMALL LETTER G ++24D7 ; mapped ; 0068 # 1.1 CIRCLED LATIN SMALL LETTER H ++24D8 ; mapped ; 0069 # 1.1 CIRCLED LATIN SMALL LETTER I ++24D9 ; mapped ; 006A # 1.1 CIRCLED LATIN SMALL LETTER J ++24DA ; mapped ; 006B # 1.1 CIRCLED LATIN SMALL LETTER K ++24DB ; mapped ; 006C # 1.1 CIRCLED LATIN SMALL LETTER L ++24DC ; mapped ; 006D # 1.1 CIRCLED LATIN SMALL LETTER M ++24DD ; mapped ; 006E # 1.1 CIRCLED LATIN SMALL LETTER N ++24DE ; mapped ; 006F # 1.1 CIRCLED LATIN SMALL LETTER O ++24DF ; mapped ; 0070 # 1.1 CIRCLED LATIN SMALL LETTER P ++24E0 ; mapped ; 0071 # 1.1 CIRCLED LATIN SMALL LETTER Q ++24E1 ; mapped ; 0072 # 1.1 CIRCLED LATIN SMALL LETTER R ++24E2 ; mapped ; 0073 # 1.1 CIRCLED LATIN SMALL LETTER S ++24E3 ; mapped ; 0074 # 1.1 CIRCLED LATIN SMALL LETTER T ++24E4 ; mapped ; 0075 # 1.1 CIRCLED LATIN SMALL LETTER U ++24E5 ; mapped ; 0076 # 1.1 CIRCLED LATIN SMALL LETTER V ++24E6 ; 
mapped ; 0077 # 1.1 CIRCLED LATIN SMALL LETTER W ++24E7 ; mapped ; 0078 # 1.1 CIRCLED LATIN SMALL LETTER X ++24E8 ; mapped ; 0079 # 1.1 CIRCLED LATIN SMALL LETTER Y ++24E9 ; mapped ; 007A # 1.1 CIRCLED LATIN SMALL LETTER Z ++24EA ; mapped ; 0030 # 1.1 CIRCLED DIGIT ZERO ++24EB..24FE ; valid ; ; NV8 # 3.2 NEGATIVE CIRCLED NUMBER ELEVEN..DOUBLE CIRCLED NUMBER TEN ++24FF ; valid ; ; NV8 # 4.0 NEGATIVE CIRCLED DIGIT ZERO ++2500..2595 ; valid ; ; NV8 # 1.1 BOX DRAWINGS LIGHT HORIZONTAL..RIGHT ONE EIGHTH BLOCK ++2596..259F ; valid ; ; NV8 # 3.2 QUADRANT LOWER LEFT..QUADRANT UPPER RIGHT AND LOWER LEFT AND LOWER RIGHT ++25A0..25EF ; valid ; ; NV8 # 1.1 BLACK SQUARE..LARGE CIRCLE ++25F0..25F7 ; valid ; ; NV8 # 3.0 WHITE SQUARE WITH UPPER LEFT QUADRANT..WHITE CIRCLE WITH UPPER RIGHT QUADRANT ++25F8..25FF ; valid ; ; NV8 # 3.2 UPPER LEFT TRIANGLE..LOWER RIGHT TRIANGLE ++2600..2613 ; valid ; ; NV8 # 1.1 BLACK SUN WITH RAYS..SALTIRE ++2614..2615 ; valid ; ; NV8 # 4.0 UMBRELLA WITH RAIN DROPS..HOT BEVERAGE ++2616..2617 ; valid ; ; NV8 # 3.2 WHITE SHOGI PIECE..BLACK SHOGI PIECE ++2618 ; valid ; ; NV8 # 4.1 SHAMROCK ++2619 ; valid ; ; NV8 # 3.0 REVERSED ROTATED FLORAL HEART BULLET ++261A..266F ; valid ; ; NV8 # 1.1 BLACK LEFT POINTING INDEX..MUSIC SHARP SIGN ++2670..2671 ; valid ; ; NV8 # 3.0 WEST SYRIAC CROSS..EAST SYRIAC CROSS ++2672..267D ; valid ; ; NV8 # 3.2 UNIVERSAL RECYCLING SYMBOL..PARTIALLY-RECYCLED PAPER SYMBOL ++267E..267F ; valid ; ; NV8 # 4.1 PERMANENT PAPER SIGN..WHEELCHAIR SYMBOL ++2680..2689 ; valid ; ; NV8 # 3.2 DIE FACE-1..BLACK CIRCLE WITH TWO WHITE DOTS ++268A..2691 ; valid ; ; NV8 # 4.0 MONOGRAM FOR YANG..BLACK FLAG ++2692..269C ; valid ; ; NV8 # 4.1 HAMMER AND PICK..FLEUR-DE-LIS ++269D ; valid ; ; NV8 # 5.1 OUTLINED WHITE STAR ++269E..269F ; valid ; ; NV8 # 5.2 THREE LINES CONVERGING RIGHT..THREE LINES CONVERGING LEFT ++26A0..26A1 ; valid ; ; NV8 # 4.0 WARNING SIGN..HIGH VOLTAGE SIGN ++26A2..26B1 ; valid ; ; NV8 # 4.1 DOUBLED FEMALE SIGN..FUNERAL URN ++26B2 
; valid ; ; NV8 # 5.0 NEUTER ++26B3..26BC ; valid ; ; NV8 # 5.1 CERES..SESQUIQUADRATE ++26BD..26BF ; valid ; ; NV8 # 5.2 SOCCER BALL..SQUARED KEY ++26C0..26C3 ; valid ; ; NV8 # 5.1 WHITE DRAUGHTS MAN..BLACK DRAUGHTS KING ++26C4..26CD ; valid ; ; NV8 # 5.2 SNOWMAN WITHOUT SNOW..DISABLED CAR ++26CE ; valid ; ; NV8 # 6.0 OPHIUCHUS ++26CF..26E1 ; valid ; ; NV8 # 5.2 PICK..RESTRICTED LEFT ENTRY-2 ++26E2 ; valid ; ; NV8 # 6.0 ASTRONOMICAL SYMBOL FOR URANUS ++26E3 ; valid ; ; NV8 # 5.2 HEAVY CIRCLE WITH STROKE AND TWO DOTS ABOVE ++26E4..26E7 ; valid ; ; NV8 # 6.0 PENTAGRAM..INVERTED PENTAGRAM ++26E8..26FF ; valid ; ; NV8 # 5.2 BLACK CROSS ON SHIELD..WHITE FLAG WITH HORIZONTAL MIDDLE BLACK STRIPE ++2700 ; valid ; ; NV8 # 7.0 BLACK SAFETY SCISSORS ++2701..2704 ; valid ; ; NV8 # 1.1 UPPER BLADE SCISSORS..WHITE SCISSORS ++2705 ; valid ; ; NV8 # 6.0 WHITE HEAVY CHECK MARK ++2706..2709 ; valid ; ; NV8 # 1.1 TELEPHONE LOCATION SIGN..ENVELOPE ++270A..270B ; valid ; ; NV8 # 6.0 RAISED FIST..RAISED HAND ++270C..2727 ; valid ; ; NV8 # 1.1 VICTORY HAND..WHITE FOUR POINTED STAR ++2728 ; valid ; ; NV8 # 6.0 SPARKLES ++2729..274B ; valid ; ; NV8 # 1.1 STRESS OUTLINED WHITE STAR..HEAVY EIGHT TEARDROP-SPOKED PROPELLER ASTERISK ++274C ; valid ; ; NV8 # 6.0 CROSS MARK ++274D ; valid ; ; NV8 # 1.1 SHADOWED WHITE CIRCLE ++274E ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED CROSS MARK ++274F..2752 ; valid ; ; NV8 # 1.1 LOWER RIGHT DROP-SHADOWED WHITE SQUARE..UPPER RIGHT SHADOWED WHITE SQUARE ++2753..2755 ; valid ; ; NV8 # 6.0 BLACK QUESTION MARK ORNAMENT..WHITE EXCLAMATION MARK ORNAMENT ++2756 ; valid ; ; NV8 # 1.1 BLACK DIAMOND MINUS WHITE X ++2757 ; valid ; ; NV8 # 5.2 HEAVY EXCLAMATION MARK SYMBOL ++2758..275E ; valid ; ; NV8 # 1.1 LIGHT VERTICAL BAR..HEAVY DOUBLE COMMA QUOTATION MARK ORNAMENT ++275F..2760 ; valid ; ; NV8 # 6.0 HEAVY LOW SINGLE COMMA QUOTATION MARK ORNAMENT..HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT ++2761..2767 ; valid ; ; NV8 # 1.1 CURVED STEM PARAGRAPH SIGN 
ORNAMENT..ROTATED FLORAL HEART BULLET ++2768..2775 ; valid ; ; NV8 # 3.2 MEDIUM LEFT PARENTHESIS ORNAMENT..MEDIUM RIGHT CURLY BRACKET ORNAMENT ++2776..2794 ; valid ; ; NV8 # 1.1 DINGBAT NEGATIVE CIRCLED DIGIT ONE..HEAVY WIDE-HEADED RIGHTWARDS ARROW ++2795..2797 ; valid ; ; NV8 # 6.0 HEAVY PLUS SIGN..HEAVY DIVISION SIGN ++2798..27AF ; valid ; ; NV8 # 1.1 HEAVY SOUTH EAST ARROW..NOTCHED LOWER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW ++27B0 ; valid ; ; NV8 # 6.0 CURLY LOOP ++27B1..27BE ; valid ; ; NV8 # 1.1 NOTCHED UPPER RIGHT-SHADOWED WHITE RIGHTWARDS ARROW..OPEN-OUTLINED RIGHTWARDS ARROW ++27BF ; valid ; ; NV8 # 6.0 DOUBLE CURLY LOOP ++27C0..27C6 ; valid ; ; NV8 # 4.1 THREE DIMENSIONAL ANGLE..RIGHT S-SHAPED BAG DELIMITER ++27C7..27CA ; valid ; ; NV8 # 5.0 OR WITH DOT INSIDE..VERTICAL BAR WITH HORIZONTAL STROKE ++27CB ; valid ; ; NV8 # 6.1 MATHEMATICAL RISING DIAGONAL ++27CC ; valid ; ; NV8 # 5.1 LONG DIVISION ++27CD ; valid ; ; NV8 # 6.1 MATHEMATICAL FALLING DIAGONAL ++27CE..27CF ; valid ; ; NV8 # 6.0 SQUARED LOGICAL AND..SQUARED LOGICAL OR ++27D0..27EB ; valid ; ; NV8 # 3.2 WHITE DIAMOND WITH CENTRED DOT..MATHEMATICAL RIGHT DOUBLE ANGLE BRACKET ++27EC..27EF ; valid ; ; NV8 # 5.1 MATHEMATICAL LEFT WHITE TORTOISE SHELL BRACKET..MATHEMATICAL RIGHT FLATTENED PARENTHESIS ++27F0..27FF ; valid ; ; NV8 # 3.2 UPWARDS QUADRUPLE ARROW..LONG RIGHTWARDS SQUIGGLE ARROW ++2800..28FF ; valid ; ; NV8 # 3.0 BRAILLE PATTERN BLANK..BRAILLE PATTERN DOTS-12345678 ++2900..2A0B ; valid ; ; NV8 # 3.2 RIGHTWARDS TWO-HEADED ARROW WITH VERTICAL STROKE..SUMMATION WITH INTEGRAL ++2A0C ; mapped ; 222B 222B 222B 222B #3.2 QUADRUPLE INTEGRAL OPERATOR ++2A0D..2A73 ; valid ; ; NV8 # 3.2 FINITE PART INTEGRAL..EQUALS SIGN ABOVE TILDE OPERATOR ++2A74 ; disallowed_STD3_mapped ; 003A 003A 003D #3.2 DOUBLE COLON EQUAL ++2A75 ; disallowed_STD3_mapped ; 003D 003D # 3.2 TWO CONSECUTIVE EQUALS SIGNS ++2A76 ; disallowed_STD3_mapped ; 003D 003D 003D #3.2 THREE CONSECUTIVE EQUALS SIGNS ++2A77..2ADB ; valid ; ; NV8 
# 3.2 EQUALS SIGN WITH TWO DOTS ABOVE AND TWO DOTS BELOW..TRANSVERSAL INTERSECTION ++2ADC ; mapped ; 2ADD 0338 # 3.2 FORKING ++2ADD..2AFF ; valid ; ; NV8 # 3.2 NONFORKING..N-ARY WHITE VERTICAL BAR ++2B00..2B0D ; valid ; ; NV8 # 4.0 NORTH EAST WHITE ARROW..UP DOWN BLACK ARROW ++2B0E..2B13 ; valid ; ; NV8 # 4.1 RIGHTWARDS ARROW WITH TIP DOWNWARDS..SQUARE WITH BOTTOM HALF BLACK ++2B14..2B1A ; valid ; ; NV8 # 5.0 SQUARE WITH UPPER RIGHT DIAGONAL HALF BLACK..DOTTED SQUARE ++2B1B..2B1F ; valid ; ; NV8 # 5.1 BLACK LARGE SQUARE..BLACK PENTAGON ++2B20..2B23 ; valid ; ; NV8 # 5.0 WHITE PENTAGON..HORIZONTAL BLACK HEXAGON ++2B24..2B4C ; valid ; ; NV8 # 5.1 BLACK LARGE CIRCLE..RIGHTWARDS ARROW ABOVE REVERSE TILDE OPERATOR ++2B4D..2B4F ; valid ; ; NV8 # 7.0 DOWNWARDS TRIANGLE-HEADED ZIGZAG ARROW..SHORT BACKSLANTED SOUTH ARROW ++2B50..2B54 ; valid ; ; NV8 # 5.1 WHITE MEDIUM STAR..WHITE RIGHT-POINTING PENTAGON ++2B55..2B59 ; valid ; ; NV8 # 5.2 HEAVY LARGE CIRCLE..HEAVY CIRCLED SALTIRE ++2B5A..2B73 ; valid ; ; NV8 # 7.0 SLANTED NORTH ARROW WITH HOOKED HEAD..DOWNWARDS TRIANGLE-HEADED ARROW TO BAR ++2B74..2B75 ; disallowed # NA .. ++2B76..2B95 ; valid ; ; NV8 # 7.0 NORTH WEST TRIANGLE-HEADED ARROW TO BAR..RIGHTWARDS BLACK ARROW ++2B96..2B97 ; disallowed # NA .. ++2B98..2BB9 ; valid ; ; NV8 # 7.0 THREE-D TOP-LIGHTED LEFTWARDS EQUILATERAL ARROWHEAD..UP ARROWHEAD IN A RECTANGLE BOX ++2BBA..2BBC ; disallowed # NA .. ++2BBD..2BC8 ; valid ; ; NV8 # 7.0 BALLOT BOX WITH LIGHT X..BLACK MEDIUM RIGHT-POINTING TRIANGLE CENTRED ++2BC9 ; disallowed # NA ++2BCA..2BD1 ; valid ; ; NV8 # 7.0 TOP HALF BLACK CIRCLE..UNCERTAINTY SIGN ++2BD2 ; valid ; ; NV8 # 10.0 GROUP MARK ++2BD3..2BEB ; disallowed # NA .. ++2BEC..2BEF ; valid ; ; NV8 # 8.0 LEFTWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS..DOWNWARDS TWO-HEADED ARROW WITH TRIANGLE ARROWHEADS ++2BF0..2BFF ; disallowed # NA .. 
++2C00 ; mapped ; 2C30 # 4.1 GLAGOLITIC CAPITAL LETTER AZU ++2C01 ; mapped ; 2C31 # 4.1 GLAGOLITIC CAPITAL LETTER BUKY ++2C02 ; mapped ; 2C32 # 4.1 GLAGOLITIC CAPITAL LETTER VEDE ++2C03 ; mapped ; 2C33 # 4.1 GLAGOLITIC CAPITAL LETTER GLAGOLI ++2C04 ; mapped ; 2C34 # 4.1 GLAGOLITIC CAPITAL LETTER DOBRO ++2C05 ; mapped ; 2C35 # 4.1 GLAGOLITIC CAPITAL LETTER YESTU ++2C06 ; mapped ; 2C36 # 4.1 GLAGOLITIC CAPITAL LETTER ZHIVETE ++2C07 ; mapped ; 2C37 # 4.1 GLAGOLITIC CAPITAL LETTER DZELO ++2C08 ; mapped ; 2C38 # 4.1 GLAGOLITIC CAPITAL LETTER ZEMLJA ++2C09 ; mapped ; 2C39 # 4.1 GLAGOLITIC CAPITAL LETTER IZHE ++2C0A ; mapped ; 2C3A # 4.1 GLAGOLITIC CAPITAL LETTER INITIAL IZHE ++2C0B ; mapped ; 2C3B # 4.1 GLAGOLITIC CAPITAL LETTER I ++2C0C ; mapped ; 2C3C # 4.1 GLAGOLITIC CAPITAL LETTER DJERVI ++2C0D ; mapped ; 2C3D # 4.1 GLAGOLITIC CAPITAL LETTER KAKO ++2C0E ; mapped ; 2C3E # 4.1 GLAGOLITIC CAPITAL LETTER LJUDIJE ++2C0F ; mapped ; 2C3F # 4.1 GLAGOLITIC CAPITAL LETTER MYSLITE ++2C10 ; mapped ; 2C40 # 4.1 GLAGOLITIC CAPITAL LETTER NASHI ++2C11 ; mapped ; 2C41 # 4.1 GLAGOLITIC CAPITAL LETTER ONU ++2C12 ; mapped ; 2C42 # 4.1 GLAGOLITIC CAPITAL LETTER POKOJI ++2C13 ; mapped ; 2C43 # 4.1 GLAGOLITIC CAPITAL LETTER RITSI ++2C14 ; mapped ; 2C44 # 4.1 GLAGOLITIC CAPITAL LETTER SLOVO ++2C15 ; mapped ; 2C45 # 4.1 GLAGOLITIC CAPITAL LETTER TVRIDO ++2C16 ; mapped ; 2C46 # 4.1 GLAGOLITIC CAPITAL LETTER UKU ++2C17 ; mapped ; 2C47 # 4.1 GLAGOLITIC CAPITAL LETTER FRITU ++2C18 ; mapped ; 2C48 # 4.1 GLAGOLITIC CAPITAL LETTER HERU ++2C19 ; mapped ; 2C49 # 4.1 GLAGOLITIC CAPITAL LETTER OTU ++2C1A ; mapped ; 2C4A # 4.1 GLAGOLITIC CAPITAL LETTER PE ++2C1B ; mapped ; 2C4B # 4.1 GLAGOLITIC CAPITAL LETTER SHTA ++2C1C ; mapped ; 2C4C # 4.1 GLAGOLITIC CAPITAL LETTER TSI ++2C1D ; mapped ; 2C4D # 4.1 GLAGOLITIC CAPITAL LETTER CHRIVI ++2C1E ; mapped ; 2C4E # 4.1 GLAGOLITIC CAPITAL LETTER SHA ++2C1F ; mapped ; 2C4F # 4.1 GLAGOLITIC CAPITAL LETTER YERU ++2C20 ; mapped ; 2C50 # 4.1 GLAGOLITIC CAPITAL 
LETTER YERI ++2C21 ; mapped ; 2C51 # 4.1 GLAGOLITIC CAPITAL LETTER YATI ++2C22 ; mapped ; 2C52 # 4.1 GLAGOLITIC CAPITAL LETTER SPIDERY HA ++2C23 ; mapped ; 2C53 # 4.1 GLAGOLITIC CAPITAL LETTER YU ++2C24 ; mapped ; 2C54 # 4.1 GLAGOLITIC CAPITAL LETTER SMALL YUS ++2C25 ; mapped ; 2C55 # 4.1 GLAGOLITIC CAPITAL LETTER SMALL YUS WITH TAIL ++2C26 ; mapped ; 2C56 # 4.1 GLAGOLITIC CAPITAL LETTER YO ++2C27 ; mapped ; 2C57 # 4.1 GLAGOLITIC CAPITAL LETTER IOTATED SMALL YUS ++2C28 ; mapped ; 2C58 # 4.1 GLAGOLITIC CAPITAL LETTER BIG YUS ++2C29 ; mapped ; 2C59 # 4.1 GLAGOLITIC CAPITAL LETTER IOTATED BIG YUS ++2C2A ; mapped ; 2C5A # 4.1 GLAGOLITIC CAPITAL LETTER FITA ++2C2B ; mapped ; 2C5B # 4.1 GLAGOLITIC CAPITAL LETTER IZHITSA ++2C2C ; mapped ; 2C5C # 4.1 GLAGOLITIC CAPITAL LETTER SHTAPIC ++2C2D ; mapped ; 2C5D # 4.1 GLAGOLITIC CAPITAL LETTER TROKUTASTI A ++2C2E ; mapped ; 2C5E # 4.1 GLAGOLITIC CAPITAL LETTER LATINATE MYSLITE ++2C2F ; disallowed # NA ++2C30..2C5E ; valid # 4.1 GLAGOLITIC SMALL LETTER AZU..GLAGOLITIC SMALL LETTER LATINATE MYSLITE ++2C5F ; disallowed # NA ++2C60 ; mapped ; 2C61 # 5.0 LATIN CAPITAL LETTER L WITH DOUBLE BAR ++2C61 ; valid # 5.0 LATIN SMALL LETTER L WITH DOUBLE BAR ++2C62 ; mapped ; 026B # 5.0 LATIN CAPITAL LETTER L WITH MIDDLE TILDE ++2C63 ; mapped ; 1D7D # 5.0 LATIN CAPITAL LETTER P WITH STROKE ++2C64 ; mapped ; 027D # 5.0 LATIN CAPITAL LETTER R WITH TAIL ++2C65..2C66 ; valid # 5.0 LATIN SMALL LETTER A WITH STROKE..LATIN SMALL LETTER T WITH DIAGONAL STROKE ++2C67 ; mapped ; 2C68 # 5.0 LATIN CAPITAL LETTER H WITH DESCENDER ++2C68 ; valid # 5.0 LATIN SMALL LETTER H WITH DESCENDER ++2C69 ; mapped ; 2C6A # 5.0 LATIN CAPITAL LETTER K WITH DESCENDER ++2C6A ; valid # 5.0 LATIN SMALL LETTER K WITH DESCENDER ++2C6B ; mapped ; 2C6C # 5.0 LATIN CAPITAL LETTER Z WITH DESCENDER ++2C6C ; valid # 5.0 LATIN SMALL LETTER Z WITH DESCENDER ++2C6D ; mapped ; 0251 # 5.1 LATIN CAPITAL LETTER ALPHA ++2C6E ; mapped ; 0271 # 5.1 LATIN CAPITAL LETTER M WITH HOOK ++2C6F ; 
mapped ; 0250 # 5.1 LATIN CAPITAL LETTER TURNED A ++2C70 ; mapped ; 0252 # 5.2 LATIN CAPITAL LETTER TURNED ALPHA ++2C71 ; valid # 5.1 LATIN SMALL LETTER V WITH RIGHT HOOK ++2C72 ; mapped ; 2C73 # 5.1 LATIN CAPITAL LETTER W WITH HOOK ++2C73 ; valid # 5.1 LATIN SMALL LETTER W WITH HOOK ++2C74 ; valid # 5.0 LATIN SMALL LETTER V WITH CURL ++2C75 ; mapped ; 2C76 # 5.0 LATIN CAPITAL LETTER HALF H ++2C76..2C77 ; valid # 5.0 LATIN SMALL LETTER HALF H..LATIN SMALL LETTER TAILLESS PHI ++2C78..2C7B ; valid # 5.1 LATIN SMALL LETTER E WITH NOTCH..LATIN LETTER SMALL CAPITAL TURNED E ++2C7C ; mapped ; 006A # 5.1 LATIN SUBSCRIPT SMALL LETTER J ++2C7D ; mapped ; 0076 # 5.1 MODIFIER LETTER CAPITAL V ++2C7E ; mapped ; 023F # 5.2 LATIN CAPITAL LETTER S WITH SWASH TAIL ++2C7F ; mapped ; 0240 # 5.2 LATIN CAPITAL LETTER Z WITH SWASH TAIL ++2C80 ; mapped ; 2C81 # 4.1 COPTIC CAPITAL LETTER ALFA ++2C81 ; valid # 4.1 COPTIC SMALL LETTER ALFA ++2C82 ; mapped ; 2C83 # 4.1 COPTIC CAPITAL LETTER VIDA ++2C83 ; valid # 4.1 COPTIC SMALL LETTER VIDA ++2C84 ; mapped ; 2C85 # 4.1 COPTIC CAPITAL LETTER GAMMA ++2C85 ; valid # 4.1 COPTIC SMALL LETTER GAMMA ++2C86 ; mapped ; 2C87 # 4.1 COPTIC CAPITAL LETTER DALDA ++2C87 ; valid # 4.1 COPTIC SMALL LETTER DALDA ++2C88 ; mapped ; 2C89 # 4.1 COPTIC CAPITAL LETTER EIE ++2C89 ; valid # 4.1 COPTIC SMALL LETTER EIE ++2C8A ; mapped ; 2C8B # 4.1 COPTIC CAPITAL LETTER SOU ++2C8B ; valid # 4.1 COPTIC SMALL LETTER SOU ++2C8C ; mapped ; 2C8D # 4.1 COPTIC CAPITAL LETTER ZATA ++2C8D ; valid # 4.1 COPTIC SMALL LETTER ZATA ++2C8E ; mapped ; 2C8F # 4.1 COPTIC CAPITAL LETTER HATE ++2C8F ; valid # 4.1 COPTIC SMALL LETTER HATE ++2C90 ; mapped ; 2C91 # 4.1 COPTIC CAPITAL LETTER THETHE ++2C91 ; valid # 4.1 COPTIC SMALL LETTER THETHE ++2C92 ; mapped ; 2C93 # 4.1 COPTIC CAPITAL LETTER IAUDA ++2C93 ; valid # 4.1 COPTIC SMALL LETTER IAUDA ++2C94 ; mapped ; 2C95 # 4.1 COPTIC CAPITAL LETTER KAPA ++2C95 ; valid # 4.1 COPTIC SMALL LETTER KAPA ++2C96 ; mapped ; 2C97 # 4.1 COPTIC CAPITAL 
LETTER LAULA ++2C97 ; valid # 4.1 COPTIC SMALL LETTER LAULA ++2C98 ; mapped ; 2C99 # 4.1 COPTIC CAPITAL LETTER MI ++2C99 ; valid # 4.1 COPTIC SMALL LETTER MI ++2C9A ; mapped ; 2C9B # 4.1 COPTIC CAPITAL LETTER NI ++2C9B ; valid # 4.1 COPTIC SMALL LETTER NI ++2C9C ; mapped ; 2C9D # 4.1 COPTIC CAPITAL LETTER KSI ++2C9D ; valid # 4.1 COPTIC SMALL LETTER KSI ++2C9E ; mapped ; 2C9F # 4.1 COPTIC CAPITAL LETTER O ++2C9F ; valid # 4.1 COPTIC SMALL LETTER O ++2CA0 ; mapped ; 2CA1 # 4.1 COPTIC CAPITAL LETTER PI ++2CA1 ; valid # 4.1 COPTIC SMALL LETTER PI ++2CA2 ; mapped ; 2CA3 # 4.1 COPTIC CAPITAL LETTER RO ++2CA3 ; valid # 4.1 COPTIC SMALL LETTER RO ++2CA4 ; mapped ; 2CA5 # 4.1 COPTIC CAPITAL LETTER SIMA ++2CA5 ; valid # 4.1 COPTIC SMALL LETTER SIMA ++2CA6 ; mapped ; 2CA7 # 4.1 COPTIC CAPITAL LETTER TAU ++2CA7 ; valid # 4.1 COPTIC SMALL LETTER TAU ++2CA8 ; mapped ; 2CA9 # 4.1 COPTIC CAPITAL LETTER UA ++2CA9 ; valid # 4.1 COPTIC SMALL LETTER UA ++2CAA ; mapped ; 2CAB # 4.1 COPTIC CAPITAL LETTER FI ++2CAB ; valid # 4.1 COPTIC SMALL LETTER FI ++2CAC ; mapped ; 2CAD # 4.1 COPTIC CAPITAL LETTER KHI ++2CAD ; valid # 4.1 COPTIC SMALL LETTER KHI ++2CAE ; mapped ; 2CAF # 4.1 COPTIC CAPITAL LETTER PSI ++2CAF ; valid # 4.1 COPTIC SMALL LETTER PSI ++2CB0 ; mapped ; 2CB1 # 4.1 COPTIC CAPITAL LETTER OOU ++2CB1 ; valid # 4.1 COPTIC SMALL LETTER OOU ++2CB2 ; mapped ; 2CB3 # 4.1 COPTIC CAPITAL LETTER DIALECT-P ALEF ++2CB3 ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P ALEF ++2CB4 ; mapped ; 2CB5 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC AIN ++2CB5 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC AIN ++2CB6 ; mapped ; 2CB7 # 4.1 COPTIC CAPITAL LETTER CRYPTOGRAMMIC EIE ++2CB7 ; valid # 4.1 COPTIC SMALL LETTER CRYPTOGRAMMIC EIE ++2CB8 ; mapped ; 2CB9 # 4.1 COPTIC CAPITAL LETTER DIALECT-P KAPA ++2CB9 ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P KAPA ++2CBA ; mapped ; 2CBB # 4.1 COPTIC CAPITAL LETTER DIALECT-P NI ++2CBB ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P NI ++2CBC ; mapped ; 2CBD # 4.1 COPTIC 
CAPITAL LETTER CRYPTOGRAMMIC NI ++2CBD ; valid # 4.1 COPTIC SMALL LETTER CRYPTOGRAMMIC NI ++2CBE ; mapped ; 2CBF # 4.1 COPTIC CAPITAL LETTER OLD COPTIC OOU ++2CBF ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC OOU ++2CC0 ; mapped ; 2CC1 # 4.1 COPTIC CAPITAL LETTER SAMPI ++2CC1 ; valid # 4.1 COPTIC SMALL LETTER SAMPI ++2CC2 ; mapped ; 2CC3 # 4.1 COPTIC CAPITAL LETTER CROSSED SHEI ++2CC3 ; valid # 4.1 COPTIC SMALL LETTER CROSSED SHEI ++2CC4 ; mapped ; 2CC5 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC SHEI ++2CC5 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC SHEI ++2CC6 ; mapped ; 2CC7 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC ESH ++2CC7 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC ESH ++2CC8 ; mapped ; 2CC9 # 4.1 COPTIC CAPITAL LETTER AKHMIMIC KHEI ++2CC9 ; valid # 4.1 COPTIC SMALL LETTER AKHMIMIC KHEI ++2CCA ; mapped ; 2CCB # 4.1 COPTIC CAPITAL LETTER DIALECT-P HORI ++2CCB ; valid # 4.1 COPTIC SMALL LETTER DIALECT-P HORI ++2CCC ; mapped ; 2CCD # 4.1 COPTIC CAPITAL LETTER OLD COPTIC HORI ++2CCD ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HORI ++2CCE ; mapped ; 2CCF # 4.1 COPTIC CAPITAL LETTER OLD COPTIC HA ++2CCF ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HA ++2CD0 ; mapped ; 2CD1 # 4.1 COPTIC CAPITAL LETTER L-SHAPED HA ++2CD1 ; valid # 4.1 COPTIC SMALL LETTER L-SHAPED HA ++2CD2 ; mapped ; 2CD3 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC HEI ++2CD3 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HEI ++2CD4 ; mapped ; 2CD5 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC HAT ++2CD5 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC HAT ++2CD6 ; mapped ; 2CD7 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC GANGIA ++2CD7 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC GANGIA ++2CD8 ; mapped ; 2CD9 # 4.1 COPTIC CAPITAL LETTER OLD COPTIC DJA ++2CD9 ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC DJA ++2CDA ; mapped ; 2CDB # 4.1 COPTIC CAPITAL LETTER OLD COPTIC SHIMA ++2CDB ; valid # 4.1 COPTIC SMALL LETTER OLD COPTIC SHIMA ++2CDC ; mapped ; 2CDD # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN SHIMA ++2CDD ; valid # 4.1 COPTIC 
SMALL LETTER OLD NUBIAN SHIMA ++2CDE ; mapped ; 2CDF # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN NGI ++2CDF ; valid # 4.1 COPTIC SMALL LETTER OLD NUBIAN NGI ++2CE0 ; mapped ; 2CE1 # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN NYI ++2CE1 ; valid # 4.1 COPTIC SMALL LETTER OLD NUBIAN NYI ++2CE2 ; mapped ; 2CE3 # 4.1 COPTIC CAPITAL LETTER OLD NUBIAN WAU ++2CE3..2CE4 ; valid # 4.1 COPTIC SMALL LETTER OLD NUBIAN WAU..COPTIC SYMBOL KAI ++2CE5..2CEA ; valid ; ; NV8 # 4.1 COPTIC SYMBOL MI RO..COPTIC SYMBOL SHIMA SIMA ++2CEB ; mapped ; 2CEC # 5.2 COPTIC CAPITAL LETTER CRYPTOGRAMMIC SHEI ++2CEC ; valid # 5.2 COPTIC SMALL LETTER CRYPTOGRAMMIC SHEI ++2CED ; mapped ; 2CEE # 5.2 COPTIC CAPITAL LETTER CRYPTOGRAMMIC GANGIA ++2CEE..2CF1 ; valid # 5.2 COPTIC SMALL LETTER CRYPTOGRAMMIC GANGIA..COPTIC COMBINING SPIRITUS LENIS ++2CF2 ; mapped ; 2CF3 # 6.1 COPTIC CAPITAL LETTER BOHAIRIC KHEI ++2CF3 ; valid # 6.1 COPTIC SMALL LETTER BOHAIRIC KHEI ++2CF4..2CF8 ; disallowed # NA .. ++2CF9..2CFF ; valid ; ; NV8 # 4.1 COPTIC OLD NUBIAN FULL STOP..COPTIC MORPHOLOGICAL DIVIDER ++2D00..2D25 ; valid # 4.1 GEORGIAN SMALL LETTER AN..GEORGIAN SMALL LETTER HOE ++2D26 ; disallowed # NA ++2D27 ; valid # 6.1 GEORGIAN SMALL LETTER YN ++2D28..2D2C ; disallowed # NA .. ++2D2D ; valid # 6.1 GEORGIAN SMALL LETTER AEN ++2D2E..2D2F ; disallowed # NA .. ++2D30..2D65 ; valid # 4.1 TIFINAGH LETTER YA..TIFINAGH LETTER YAZZ ++2D66..2D67 ; valid # 6.1 TIFINAGH LETTER YE..TIFINAGH LETTER YO ++2D68..2D6E ; disallowed # NA .. ++2D6F ; mapped ; 2D61 # 4.1 TIFINAGH MODIFIER LETTER LABIALIZATION MARK ++2D70 ; valid ; ; NV8 # 6.0 TIFINAGH SEPARATOR MARK ++2D71..2D7E ; disallowed # NA .. ++2D7F ; valid # 6.0 TIFINAGH CONSONANT JOINER ++2D80..2D96 ; valid # 4.1 ETHIOPIC SYLLABLE LOA..ETHIOPIC SYLLABLE GGWE ++2D97..2D9F ; disallowed # NA .. 
++2DA0..2DA6 ; valid # 4.1 ETHIOPIC SYLLABLE SSA..ETHIOPIC SYLLABLE SSO ++2DA7 ; disallowed # NA ++2DA8..2DAE ; valid # 4.1 ETHIOPIC SYLLABLE CCA..ETHIOPIC SYLLABLE CCO ++2DAF ; disallowed # NA ++2DB0..2DB6 ; valid # 4.1 ETHIOPIC SYLLABLE ZZA..ETHIOPIC SYLLABLE ZZO ++2DB7 ; disallowed # NA ++2DB8..2DBE ; valid # 4.1 ETHIOPIC SYLLABLE CCHA..ETHIOPIC SYLLABLE CCHO ++2DBF ; disallowed # NA ++2DC0..2DC6 ; valid # 4.1 ETHIOPIC SYLLABLE QYA..ETHIOPIC SYLLABLE QYO ++2DC7 ; disallowed # NA ++2DC8..2DCE ; valid # 4.1 ETHIOPIC SYLLABLE KYA..ETHIOPIC SYLLABLE KYO ++2DCF ; disallowed # NA ++2DD0..2DD6 ; valid # 4.1 ETHIOPIC SYLLABLE XYA..ETHIOPIC SYLLABLE XYO ++2DD7 ; disallowed # NA ++2DD8..2DDE ; valid # 4.1 ETHIOPIC SYLLABLE GYA..ETHIOPIC SYLLABLE GYO ++2DDF ; disallowed # NA ++2DE0..2DFF ; valid # 5.1 COMBINING CYRILLIC LETTER BE..COMBINING CYRILLIC LETTER IOTIFIED BIG YUS ++2E00..2E17 ; valid ; ; NV8 # 4.1 RIGHT ANGLE SUBSTITUTION MARKER..DOUBLE OBLIQUE HYPHEN ++2E18..2E1B ; valid ; ; NV8 # 5.1 INVERTED INTERROBANG..TILDE WITH RING ABOVE ++2E1C..2E1D ; valid ; ; NV8 # 4.1 LEFT LOW PARAPHRASE BRACKET..RIGHT LOW PARAPHRASE BRACKET ++2E1E..2E2E ; valid ; ; NV8 # 5.1 TILDE WITH DOT ABOVE..REVERSED QUESTION MARK ++2E2F ; valid # 5.1 VERTICAL TILDE ++2E30 ; valid ; ; NV8 # 5.1 RING POINT ++2E31 ; valid ; ; NV8 # 5.2 WORD SEPARATOR MIDDLE DOT ++2E32..2E3B ; valid ; ; NV8 # 6.1 TURNED COMMA..THREE-EM DASH ++2E3C..2E42 ; valid ; ; NV8 # 7.0 STENOGRAPHIC FULL STOP..DOUBLE LOW-REVERSED-9 QUOTATION MARK ++2E43..2E44 ; valid ; ; NV8 # 9.0 DASH WITH LEFT UPTURN..DOUBLE SUSPENSION MARK ++2E45..2E49 ; valid ; ; NV8 # 10.0 INVERTED LOW KAVYKA..DOUBLE STACKED COMMA ++2E4A..2E7F ; disallowed # NA .. 
++2E80..2E99 ; valid ; ; NV8 # 3.0 CJK RADICAL REPEAT..CJK RADICAL RAP ++2E9A ; disallowed # NA ++2E9B..2E9E ; valid ; ; NV8 # 3.0 CJK RADICAL CHOKE..CJK RADICAL DEATH ++2E9F ; mapped ; 6BCD # 3.0 CJK RADICAL MOTHER ++2EA0..2EF2 ; valid ; ; NV8 # 3.0 CJK RADICAL CIVILIAN..CJK RADICAL J-SIMPLIFIED TURTLE ++2EF3 ; mapped ; 9F9F # 3.0 CJK RADICAL C-SIMPLIFIED TURTLE ++2EF4..2EFF ; disallowed # NA .. ++2F00 ; mapped ; 4E00 # 3.0 KANGXI RADICAL ONE ++2F01 ; mapped ; 4E28 # 3.0 KANGXI RADICAL LINE ++2F02 ; mapped ; 4E36 # 3.0 KANGXI RADICAL DOT ++2F03 ; mapped ; 4E3F # 3.0 KANGXI RADICAL SLASH ++2F04 ; mapped ; 4E59 # 3.0 KANGXI RADICAL SECOND ++2F05 ; mapped ; 4E85 # 3.0 KANGXI RADICAL HOOK ++2F06 ; mapped ; 4E8C # 3.0 KANGXI RADICAL TWO ++2F07 ; mapped ; 4EA0 # 3.0 KANGXI RADICAL LID ++2F08 ; mapped ; 4EBA # 3.0 KANGXI RADICAL MAN ++2F09 ; mapped ; 513F # 3.0 KANGXI RADICAL LEGS ++2F0A ; mapped ; 5165 # 3.0 KANGXI RADICAL ENTER ++2F0B ; mapped ; 516B # 3.0 KANGXI RADICAL EIGHT ++2F0C ; mapped ; 5182 # 3.0 KANGXI RADICAL DOWN BOX ++2F0D ; mapped ; 5196 # 3.0 KANGXI RADICAL COVER ++2F0E ; mapped ; 51AB # 3.0 KANGXI RADICAL ICE ++2F0F ; mapped ; 51E0 # 3.0 KANGXI RADICAL TABLE ++2F10 ; mapped ; 51F5 # 3.0 KANGXI RADICAL OPEN BOX ++2F11 ; mapped ; 5200 # 3.0 KANGXI RADICAL KNIFE ++2F12 ; mapped ; 529B # 3.0 KANGXI RADICAL POWER ++2F13 ; mapped ; 52F9 # 3.0 KANGXI RADICAL WRAP ++2F14 ; mapped ; 5315 # 3.0 KANGXI RADICAL SPOON ++2F15 ; mapped ; 531A # 3.0 KANGXI RADICAL RIGHT OPEN BOX ++2F16 ; mapped ; 5338 # 3.0 KANGXI RADICAL HIDING ENCLOSURE ++2F17 ; mapped ; 5341 # 3.0 KANGXI RADICAL TEN ++2F18 ; mapped ; 535C # 3.0 KANGXI RADICAL DIVINATION ++2F19 ; mapped ; 5369 # 3.0 KANGXI RADICAL SEAL ++2F1A ; mapped ; 5382 # 3.0 KANGXI RADICAL CLIFF ++2F1B ; mapped ; 53B6 # 3.0 KANGXI RADICAL PRIVATE ++2F1C ; mapped ; 53C8 # 3.0 KANGXI RADICAL AGAIN ++2F1D ; mapped ; 53E3 # 3.0 KANGXI RADICAL MOUTH ++2F1E ; mapped ; 56D7 # 3.0 KANGXI RADICAL ENCLOSURE ++2F1F ; mapped ; 571F # 3.0 
KANGXI RADICAL EARTH ++2F20 ; mapped ; 58EB # 3.0 KANGXI RADICAL SCHOLAR ++2F21 ; mapped ; 5902 # 3.0 KANGXI RADICAL GO ++2F22 ; mapped ; 590A # 3.0 KANGXI RADICAL GO SLOWLY ++2F23 ; mapped ; 5915 # 3.0 KANGXI RADICAL EVENING ++2F24 ; mapped ; 5927 # 3.0 KANGXI RADICAL BIG ++2F25 ; mapped ; 5973 # 3.0 KANGXI RADICAL WOMAN ++2F26 ; mapped ; 5B50 # 3.0 KANGXI RADICAL CHILD ++2F27 ; mapped ; 5B80 # 3.0 KANGXI RADICAL ROOF ++2F28 ; mapped ; 5BF8 # 3.0 KANGXI RADICAL INCH ++2F29 ; mapped ; 5C0F # 3.0 KANGXI RADICAL SMALL ++2F2A ; mapped ; 5C22 # 3.0 KANGXI RADICAL LAME ++2F2B ; mapped ; 5C38 # 3.0 KANGXI RADICAL CORPSE ++2F2C ; mapped ; 5C6E # 3.0 KANGXI RADICAL SPROUT ++2F2D ; mapped ; 5C71 # 3.0 KANGXI RADICAL MOUNTAIN ++2F2E ; mapped ; 5DDB # 3.0 KANGXI RADICAL RIVER ++2F2F ; mapped ; 5DE5 # 3.0 KANGXI RADICAL WORK ++2F30 ; mapped ; 5DF1 # 3.0 KANGXI RADICAL ONESELF ++2F31 ; mapped ; 5DFE # 3.0 KANGXI RADICAL TURBAN ++2F32 ; mapped ; 5E72 # 3.0 KANGXI RADICAL DRY ++2F33 ; mapped ; 5E7A # 3.0 KANGXI RADICAL SHORT THREAD ++2F34 ; mapped ; 5E7F # 3.0 KANGXI RADICAL DOTTED CLIFF ++2F35 ; mapped ; 5EF4 # 3.0 KANGXI RADICAL LONG STRIDE ++2F36 ; mapped ; 5EFE # 3.0 KANGXI RADICAL TWO HANDS ++2F37 ; mapped ; 5F0B # 3.0 KANGXI RADICAL SHOOT ++2F38 ; mapped ; 5F13 # 3.0 KANGXI RADICAL BOW ++2F39 ; mapped ; 5F50 # 3.0 KANGXI RADICAL SNOUT ++2F3A ; mapped ; 5F61 # 3.0 KANGXI RADICAL BRISTLE ++2F3B ; mapped ; 5F73 # 3.0 KANGXI RADICAL STEP ++2F3C ; mapped ; 5FC3 # 3.0 KANGXI RADICAL HEART ++2F3D ; mapped ; 6208 # 3.0 KANGXI RADICAL HALBERD ++2F3E ; mapped ; 6236 # 3.0 KANGXI RADICAL DOOR ++2F3F ; mapped ; 624B # 3.0 KANGXI RADICAL HAND ++2F40 ; mapped ; 652F # 3.0 KANGXI RADICAL BRANCH ++2F41 ; mapped ; 6534 # 3.0 KANGXI RADICAL RAP ++2F42 ; mapped ; 6587 # 3.0 KANGXI RADICAL SCRIPT ++2F43 ; mapped ; 6597 # 3.0 KANGXI RADICAL DIPPER ++2F44 ; mapped ; 65A4 # 3.0 KANGXI RADICAL AXE ++2F45 ; mapped ; 65B9 # 3.0 KANGXI RADICAL SQUARE ++2F46 ; mapped ; 65E0 # 3.0 KANGXI RADICAL NOT 
++2F47 ; mapped ; 65E5 # 3.0 KANGXI RADICAL SUN ++2F48 ; mapped ; 66F0 # 3.0 KANGXI RADICAL SAY ++2F49 ; mapped ; 6708 # 3.0 KANGXI RADICAL MOON ++2F4A ; mapped ; 6728 # 3.0 KANGXI RADICAL TREE ++2F4B ; mapped ; 6B20 # 3.0 KANGXI RADICAL LACK ++2F4C ; mapped ; 6B62 # 3.0 KANGXI RADICAL STOP ++2F4D ; mapped ; 6B79 # 3.0 KANGXI RADICAL DEATH ++2F4E ; mapped ; 6BB3 # 3.0 KANGXI RADICAL WEAPON ++2F4F ; mapped ; 6BCB # 3.0 KANGXI RADICAL DO NOT ++2F50 ; mapped ; 6BD4 # 3.0 KANGXI RADICAL COMPARE ++2F51 ; mapped ; 6BDB # 3.0 KANGXI RADICAL FUR ++2F52 ; mapped ; 6C0F # 3.0 KANGXI RADICAL CLAN ++2F53 ; mapped ; 6C14 # 3.0 KANGXI RADICAL STEAM ++2F54 ; mapped ; 6C34 # 3.0 KANGXI RADICAL WATER ++2F55 ; mapped ; 706B # 3.0 KANGXI RADICAL FIRE ++2F56 ; mapped ; 722A # 3.0 KANGXI RADICAL CLAW ++2F57 ; mapped ; 7236 # 3.0 KANGXI RADICAL FATHER ++2F58 ; mapped ; 723B # 3.0 KANGXI RADICAL DOUBLE X ++2F59 ; mapped ; 723F # 3.0 KANGXI RADICAL HALF TREE TRUNK ++2F5A ; mapped ; 7247 # 3.0 KANGXI RADICAL SLICE ++2F5B ; mapped ; 7259 # 3.0 KANGXI RADICAL FANG ++2F5C ; mapped ; 725B # 3.0 KANGXI RADICAL COW ++2F5D ; mapped ; 72AC # 3.0 KANGXI RADICAL DOG ++2F5E ; mapped ; 7384 # 3.0 KANGXI RADICAL PROFOUND ++2F5F ; mapped ; 7389 # 3.0 KANGXI RADICAL JADE ++2F60 ; mapped ; 74DC # 3.0 KANGXI RADICAL MELON ++2F61 ; mapped ; 74E6 # 3.0 KANGXI RADICAL TILE ++2F62 ; mapped ; 7518 # 3.0 KANGXI RADICAL SWEET ++2F63 ; mapped ; 751F # 3.0 KANGXI RADICAL LIFE ++2F64 ; mapped ; 7528 # 3.0 KANGXI RADICAL USE ++2F65 ; mapped ; 7530 # 3.0 KANGXI RADICAL FIELD ++2F66 ; mapped ; 758B # 3.0 KANGXI RADICAL BOLT OF CLOTH ++2F67 ; mapped ; 7592 # 3.0 KANGXI RADICAL SICKNESS ++2F68 ; mapped ; 7676 # 3.0 KANGXI RADICAL DOTTED TENT ++2F69 ; mapped ; 767D # 3.0 KANGXI RADICAL WHITE ++2F6A ; mapped ; 76AE # 3.0 KANGXI RADICAL SKIN ++2F6B ; mapped ; 76BF # 3.0 KANGXI RADICAL DISH ++2F6C ; mapped ; 76EE # 3.0 KANGXI RADICAL EYE ++2F6D ; mapped ; 77DB # 3.0 KANGXI RADICAL SPEAR ++2F6E ; mapped ; 77E2 # 3.0 KANGXI 
RADICAL ARROW ++2F6F ; mapped ; 77F3 # 3.0 KANGXI RADICAL STONE ++2F70 ; mapped ; 793A # 3.0 KANGXI RADICAL SPIRIT ++2F71 ; mapped ; 79B8 # 3.0 KANGXI RADICAL TRACK ++2F72 ; mapped ; 79BE # 3.0 KANGXI RADICAL GRAIN ++2F73 ; mapped ; 7A74 # 3.0 KANGXI RADICAL CAVE ++2F74 ; mapped ; 7ACB # 3.0 KANGXI RADICAL STAND ++2F75 ; mapped ; 7AF9 # 3.0 KANGXI RADICAL BAMBOO ++2F76 ; mapped ; 7C73 # 3.0 KANGXI RADICAL RICE ++2F77 ; mapped ; 7CF8 # 3.0 KANGXI RADICAL SILK ++2F78 ; mapped ; 7F36 # 3.0 KANGXI RADICAL JAR ++2F79 ; mapped ; 7F51 # 3.0 KANGXI RADICAL NET ++2F7A ; mapped ; 7F8A # 3.0 KANGXI RADICAL SHEEP ++2F7B ; mapped ; 7FBD # 3.0 KANGXI RADICAL FEATHER ++2F7C ; mapped ; 8001 # 3.0 KANGXI RADICAL OLD ++2F7D ; mapped ; 800C # 3.0 KANGXI RADICAL AND ++2F7E ; mapped ; 8012 # 3.0 KANGXI RADICAL PLOW ++2F7F ; mapped ; 8033 # 3.0 KANGXI RADICAL EAR ++2F80 ; mapped ; 807F # 3.0 KANGXI RADICAL BRUSH ++2F81 ; mapped ; 8089 # 3.0 KANGXI RADICAL MEAT ++2F82 ; mapped ; 81E3 # 3.0 KANGXI RADICAL MINISTER ++2F83 ; mapped ; 81EA # 3.0 KANGXI RADICAL SELF ++2F84 ; mapped ; 81F3 # 3.0 KANGXI RADICAL ARRIVE ++2F85 ; mapped ; 81FC # 3.0 KANGXI RADICAL MORTAR ++2F86 ; mapped ; 820C # 3.0 KANGXI RADICAL TONGUE ++2F87 ; mapped ; 821B # 3.0 KANGXI RADICAL OPPOSE ++2F88 ; mapped ; 821F # 3.0 KANGXI RADICAL BOAT ++2F89 ; mapped ; 826E # 3.0 KANGXI RADICAL STOPPING ++2F8A ; mapped ; 8272 # 3.0 KANGXI RADICAL COLOR ++2F8B ; mapped ; 8278 # 3.0 KANGXI RADICAL GRASS ++2F8C ; mapped ; 864D # 3.0 KANGXI RADICAL TIGER ++2F8D ; mapped ; 866B # 3.0 KANGXI RADICAL INSECT ++2F8E ; mapped ; 8840 # 3.0 KANGXI RADICAL BLOOD ++2F8F ; mapped ; 884C # 3.0 KANGXI RADICAL WALK ENCLOSURE ++2F90 ; mapped ; 8863 # 3.0 KANGXI RADICAL CLOTHES ++2F91 ; mapped ; 897E # 3.0 KANGXI RADICAL WEST ++2F92 ; mapped ; 898B # 3.0 KANGXI RADICAL SEE ++2F93 ; mapped ; 89D2 # 3.0 KANGXI RADICAL HORN ++2F94 ; mapped ; 8A00 # 3.0 KANGXI RADICAL SPEECH ++2F95 ; mapped ; 8C37 # 3.0 KANGXI RADICAL VALLEY ++2F96 ; mapped ; 8C46 # 3.0 
KANGXI RADICAL BEAN ++2F97 ; mapped ; 8C55 # 3.0 KANGXI RADICAL PIG ++2F98 ; mapped ; 8C78 # 3.0 KANGXI RADICAL BADGER ++2F99 ; mapped ; 8C9D # 3.0 KANGXI RADICAL SHELL ++2F9A ; mapped ; 8D64 # 3.0 KANGXI RADICAL RED ++2F9B ; mapped ; 8D70 # 3.0 KANGXI RADICAL RUN ++2F9C ; mapped ; 8DB3 # 3.0 KANGXI RADICAL FOOT ++2F9D ; mapped ; 8EAB # 3.0 KANGXI RADICAL BODY ++2F9E ; mapped ; 8ECA # 3.0 KANGXI RADICAL CART ++2F9F ; mapped ; 8F9B # 3.0 KANGXI RADICAL BITTER ++2FA0 ; mapped ; 8FB0 # 3.0 KANGXI RADICAL MORNING ++2FA1 ; mapped ; 8FB5 # 3.0 KANGXI RADICAL WALK ++2FA2 ; mapped ; 9091 # 3.0 KANGXI RADICAL CITY ++2FA3 ; mapped ; 9149 # 3.0 KANGXI RADICAL WINE ++2FA4 ; mapped ; 91C6 # 3.0 KANGXI RADICAL DISTINGUISH ++2FA5 ; mapped ; 91CC # 3.0 KANGXI RADICAL VILLAGE ++2FA6 ; mapped ; 91D1 # 3.0 KANGXI RADICAL GOLD ++2FA7 ; mapped ; 9577 # 3.0 KANGXI RADICAL LONG ++2FA8 ; mapped ; 9580 # 3.0 KANGXI RADICAL GATE ++2FA9 ; mapped ; 961C # 3.0 KANGXI RADICAL MOUND ++2FAA ; mapped ; 96B6 # 3.0 KANGXI RADICAL SLAVE ++2FAB ; mapped ; 96B9 # 3.0 KANGXI RADICAL SHORT TAILED BIRD ++2FAC ; mapped ; 96E8 # 3.0 KANGXI RADICAL RAIN ++2FAD ; mapped ; 9751 # 3.0 KANGXI RADICAL BLUE ++2FAE ; mapped ; 975E # 3.0 KANGXI RADICAL WRONG ++2FAF ; mapped ; 9762 # 3.0 KANGXI RADICAL FACE ++2FB0 ; mapped ; 9769 # 3.0 KANGXI RADICAL LEATHER ++2FB1 ; mapped ; 97CB # 3.0 KANGXI RADICAL TANNED LEATHER ++2FB2 ; mapped ; 97ED # 3.0 KANGXI RADICAL LEEK ++2FB3 ; mapped ; 97F3 # 3.0 KANGXI RADICAL SOUND ++2FB4 ; mapped ; 9801 # 3.0 KANGXI RADICAL LEAF ++2FB5 ; mapped ; 98A8 # 3.0 KANGXI RADICAL WIND ++2FB6 ; mapped ; 98DB # 3.0 KANGXI RADICAL FLY ++2FB7 ; mapped ; 98DF # 3.0 KANGXI RADICAL EAT ++2FB8 ; mapped ; 9996 # 3.0 KANGXI RADICAL HEAD ++2FB9 ; mapped ; 9999 # 3.0 KANGXI RADICAL FRAGRANT ++2FBA ; mapped ; 99AC # 3.0 KANGXI RADICAL HORSE ++2FBB ; mapped ; 9AA8 # 3.0 KANGXI RADICAL BONE ++2FBC ; mapped ; 9AD8 # 3.0 KANGXI RADICAL TALL ++2FBD ; mapped ; 9ADF # 3.0 KANGXI RADICAL HAIR ++2FBE ; mapped ; 
9B25 # 3.0 KANGXI RADICAL FIGHT ++2FBF ; mapped ; 9B2F # 3.0 KANGXI RADICAL SACRIFICIAL WINE ++2FC0 ; mapped ; 9B32 # 3.0 KANGXI RADICAL CAULDRON ++2FC1 ; mapped ; 9B3C # 3.0 KANGXI RADICAL GHOST ++2FC2 ; mapped ; 9B5A # 3.0 KANGXI RADICAL FISH ++2FC3 ; mapped ; 9CE5 # 3.0 KANGXI RADICAL BIRD ++2FC4 ; mapped ; 9E75 # 3.0 KANGXI RADICAL SALT ++2FC5 ; mapped ; 9E7F # 3.0 KANGXI RADICAL DEER ++2FC6 ; mapped ; 9EA5 # 3.0 KANGXI RADICAL WHEAT ++2FC7 ; mapped ; 9EBB # 3.0 KANGXI RADICAL HEMP ++2FC8 ; mapped ; 9EC3 # 3.0 KANGXI RADICAL YELLOW ++2FC9 ; mapped ; 9ECD # 3.0 KANGXI RADICAL MILLET ++2FCA ; mapped ; 9ED1 # 3.0 KANGXI RADICAL BLACK ++2FCB ; mapped ; 9EF9 # 3.0 KANGXI RADICAL EMBROIDERY ++2FCC ; mapped ; 9EFD # 3.0 KANGXI RADICAL FROG ++2FCD ; mapped ; 9F0E # 3.0 KANGXI RADICAL TRIPOD ++2FCE ; mapped ; 9F13 # 3.0 KANGXI RADICAL DRUM ++2FCF ; mapped ; 9F20 # 3.0 KANGXI RADICAL RAT ++2FD0 ; mapped ; 9F3B # 3.0 KANGXI RADICAL NOSE ++2FD1 ; mapped ; 9F4A # 3.0 KANGXI RADICAL EVEN ++2FD2 ; mapped ; 9F52 # 3.0 KANGXI RADICAL TOOTH ++2FD3 ; mapped ; 9F8D # 3.0 KANGXI RADICAL DRAGON ++2FD4 ; mapped ; 9F9C # 3.0 KANGXI RADICAL TURTLE ++2FD5 ; mapped ; 9FA0 # 3.0 KANGXI RADICAL FLUTE ++2FD6..2FEF ; disallowed # NA .. ++2FF0..2FFB ; disallowed # 3.0 IDEOGRAPHIC DESCRIPTION CHARACTER LEFT TO RIGHT..IDEOGRAPHIC DESCRIPTION CHARACTER OVERLAID ++2FFC..2FFF ; disallowed # NA .. 
++3000 ; disallowed_STD3_mapped ; 0020 # 1.1 IDEOGRAPHIC SPACE ++3001 ; valid ; ; NV8 # 1.1 IDEOGRAPHIC COMMA ++3002 ; mapped ; 002E # 1.1 IDEOGRAPHIC FULL STOP ++3003..3004 ; valid ; ; NV8 # 1.1 DITTO MARK..JAPANESE INDUSTRIAL STANDARD SYMBOL ++3005..3007 ; valid # 1.1 IDEOGRAPHIC ITERATION MARK..IDEOGRAPHIC NUMBER ZERO ++3008..3029 ; valid ; ; NV8 # 1.1 LEFT ANGLE BRACKET..HANGZHOU NUMERAL NINE ++302A..302D ; valid # 1.1 IDEOGRAPHIC LEVEL TONE MARK..IDEOGRAPHIC ENTERING TONE MARK ++302E..3035 ; valid ; ; NV8 # 1.1 HANGUL SINGLE DOT TONE MARK..VERTICAL KANA REPEAT MARK LOWER HALF ++3036 ; mapped ; 3012 # 1.1 CIRCLED POSTAL MARK ++3037 ; valid ; ; NV8 # 1.1 IDEOGRAPHIC TELEGRAPH LINE FEED SEPARATOR SYMBOL ++3038 ; mapped ; 5341 # 3.0 HANGZHOU NUMERAL TEN ++3039 ; mapped ; 5344 # 3.0 HANGZHOU NUMERAL TWENTY ++303A ; mapped ; 5345 # 3.0 HANGZHOU NUMERAL THIRTY ++303B ; valid ; ; NV8 # 3.2 VERTICAL IDEOGRAPHIC ITERATION MARK ++303C ; valid # 3.2 MASU MARK ++303D ; valid ; ; NV8 # 3.2 PART ALTERNATION MARK ++303E ; valid ; ; NV8 # 3.0 IDEOGRAPHIC VARIATION INDICATOR ++303F ; valid ; ; NV8 # 1.1 IDEOGRAPHIC HALF FILL SPACE ++3040 ; disallowed # NA ++3041..3094 ; valid # 1.1 HIRAGANA LETTER SMALL A..HIRAGANA LETTER VU ++3095..3096 ; valid # 3.2 HIRAGANA LETTER SMALL KA..HIRAGANA LETTER SMALL KE ++3097..3098 ; disallowed # NA .. 
++3099..309A ; valid # 1.1 COMBINING KATAKANA-HIRAGANA VOICED SOUND MARK..COMBINING KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK ++309B ; disallowed_STD3_mapped ; 0020 3099 # 1.1 KATAKANA-HIRAGANA VOICED SOUND MARK ++309C ; disallowed_STD3_mapped ; 0020 309A # 1.1 KATAKANA-HIRAGANA SEMI-VOICED SOUND MARK ++309D..309E ; valid # 1.1 HIRAGANA ITERATION MARK..HIRAGANA VOICED ITERATION MARK ++309F ; mapped ; 3088 308A # 3.2 HIRAGANA DIGRAPH YORI ++30A0 ; valid ; ; NV8 # 3.2 KATAKANA-HIRAGANA DOUBLE HYPHEN ++30A1..30FE ; valid # 1.1 KATAKANA LETTER SMALL A..KATAKANA VOICED ITERATION MARK ++30FF ; mapped ; 30B3 30C8 # 3.2 KATAKANA DIGRAPH KOTO ++3100..3104 ; disallowed # NA .. ++3105..312C ; valid # 1.1 BOPOMOFO LETTER B..BOPOMOFO LETTER GN ++312D ; valid # 5.1 BOPOMOFO LETTER IH ++312E ; valid # 10.0 BOPOMOFO LETTER O WITH DOT ABOVE ++312F..3130 ; disallowed # NA .. ++3131 ; mapped ; 1100 # 1.1 HANGUL LETTER KIYEOK ++3132 ; mapped ; 1101 # 1.1 HANGUL LETTER SSANGKIYEOK ++3133 ; mapped ; 11AA # 1.1 HANGUL LETTER KIYEOK-SIOS ++3134 ; mapped ; 1102 # 1.1 HANGUL LETTER NIEUN ++3135 ; mapped ; 11AC # 1.1 HANGUL LETTER NIEUN-CIEUC ++3136 ; mapped ; 11AD # 1.1 HANGUL LETTER NIEUN-HIEUH ++3137 ; mapped ; 1103 # 1.1 HANGUL LETTER TIKEUT ++3138 ; mapped ; 1104 # 1.1 HANGUL LETTER SSANGTIKEUT ++3139 ; mapped ; 1105 # 1.1 HANGUL LETTER RIEUL ++313A ; mapped ; 11B0 # 1.1 HANGUL LETTER RIEUL-KIYEOK ++313B ; mapped ; 11B1 # 1.1 HANGUL LETTER RIEUL-MIEUM ++313C ; mapped ; 11B2 # 1.1 HANGUL LETTER RIEUL-PIEUP ++313D ; mapped ; 11B3 # 1.1 HANGUL LETTER RIEUL-SIOS ++313E ; mapped ; 11B4 # 1.1 HANGUL LETTER RIEUL-THIEUTH ++313F ; mapped ; 11B5 # 1.1 HANGUL LETTER RIEUL-PHIEUPH ++3140 ; mapped ; 111A # 1.1 HANGUL LETTER RIEUL-HIEUH ++3141 ; mapped ; 1106 # 1.1 HANGUL LETTER MIEUM ++3142 ; mapped ; 1107 # 1.1 HANGUL LETTER PIEUP ++3143 ; mapped ; 1108 # 1.1 HANGUL LETTER SSANGPIEUP ++3144 ; mapped ; 1121 # 1.1 HANGUL LETTER PIEUP-SIOS ++3145 ; mapped ; 1109 # 1.1 HANGUL LETTER SIOS ++3146 ; 
mapped ; 110A # 1.1 HANGUL LETTER SSANGSIOS ++3147 ; mapped ; 110B # 1.1 HANGUL LETTER IEUNG ++3148 ; mapped ; 110C # 1.1 HANGUL LETTER CIEUC ++3149 ; mapped ; 110D # 1.1 HANGUL LETTER SSANGCIEUC ++314A ; mapped ; 110E # 1.1 HANGUL LETTER CHIEUCH ++314B ; mapped ; 110F # 1.1 HANGUL LETTER KHIEUKH ++314C ; mapped ; 1110 # 1.1 HANGUL LETTER THIEUTH ++314D ; mapped ; 1111 # 1.1 HANGUL LETTER PHIEUPH ++314E ; mapped ; 1112 # 1.1 HANGUL LETTER HIEUH ++314F ; mapped ; 1161 # 1.1 HANGUL LETTER A ++3150 ; mapped ; 1162 # 1.1 HANGUL LETTER AE ++3151 ; mapped ; 1163 # 1.1 HANGUL LETTER YA ++3152 ; mapped ; 1164 # 1.1 HANGUL LETTER YAE ++3153 ; mapped ; 1165 # 1.1 HANGUL LETTER EO ++3154 ; mapped ; 1166 # 1.1 HANGUL LETTER E ++3155 ; mapped ; 1167 # 1.1 HANGUL LETTER YEO ++3156 ; mapped ; 1168 # 1.1 HANGUL LETTER YE ++3157 ; mapped ; 1169 # 1.1 HANGUL LETTER O ++3158 ; mapped ; 116A # 1.1 HANGUL LETTER WA ++3159 ; mapped ; 116B # 1.1 HANGUL LETTER WAE ++315A ; mapped ; 116C # 1.1 HANGUL LETTER OE ++315B ; mapped ; 116D # 1.1 HANGUL LETTER YO ++315C ; mapped ; 116E # 1.1 HANGUL LETTER U ++315D ; mapped ; 116F # 1.1 HANGUL LETTER WEO ++315E ; mapped ; 1170 # 1.1 HANGUL LETTER WE ++315F ; mapped ; 1171 # 1.1 HANGUL LETTER WI ++3160 ; mapped ; 1172 # 1.1 HANGUL LETTER YU ++3161 ; mapped ; 1173 # 1.1 HANGUL LETTER EU ++3162 ; mapped ; 1174 # 1.1 HANGUL LETTER YI ++3163 ; mapped ; 1175 # 1.1 HANGUL LETTER I ++3164 ; disallowed # 1.1 HANGUL FILLER ++3165 ; mapped ; 1114 # 1.1 HANGUL LETTER SSANGNIEUN ++3166 ; mapped ; 1115 # 1.1 HANGUL LETTER NIEUN-TIKEUT ++3167 ; mapped ; 11C7 # 1.1 HANGUL LETTER NIEUN-SIOS ++3168 ; mapped ; 11C8 # 1.1 HANGUL LETTER NIEUN-PANSIOS ++3169 ; mapped ; 11CC # 1.1 HANGUL LETTER RIEUL-KIYEOK-SIOS ++316A ; mapped ; 11CE # 1.1 HANGUL LETTER RIEUL-TIKEUT ++316B ; mapped ; 11D3 # 1.1 HANGUL LETTER RIEUL-PIEUP-SIOS ++316C ; mapped ; 11D7 # 1.1 HANGUL LETTER RIEUL-PANSIOS ++316D ; mapped ; 11D9 # 1.1 HANGUL LETTER RIEUL-YEORINHIEUH ++316E ; mapped ; 111C # 1.1 
HANGUL LETTER MIEUM-PIEUP ++316F ; mapped ; 11DD # 1.1 HANGUL LETTER MIEUM-SIOS ++3170 ; mapped ; 11DF # 1.1 HANGUL LETTER MIEUM-PANSIOS ++3171 ; mapped ; 111D # 1.1 HANGUL LETTER KAPYEOUNMIEUM ++3172 ; mapped ; 111E # 1.1 HANGUL LETTER PIEUP-KIYEOK ++3173 ; mapped ; 1120 # 1.1 HANGUL LETTER PIEUP-TIKEUT ++3174 ; mapped ; 1122 # 1.1 HANGUL LETTER PIEUP-SIOS-KIYEOK ++3175 ; mapped ; 1123 # 1.1 HANGUL LETTER PIEUP-SIOS-TIKEUT ++3176 ; mapped ; 1127 # 1.1 HANGUL LETTER PIEUP-CIEUC ++3177 ; mapped ; 1129 # 1.1 HANGUL LETTER PIEUP-THIEUTH ++3178 ; mapped ; 112B # 1.1 HANGUL LETTER KAPYEOUNPIEUP ++3179 ; mapped ; 112C # 1.1 HANGUL LETTER KAPYEOUNSSANGPIEUP ++317A ; mapped ; 112D # 1.1 HANGUL LETTER SIOS-KIYEOK ++317B ; mapped ; 112E # 1.1 HANGUL LETTER SIOS-NIEUN ++317C ; mapped ; 112F # 1.1 HANGUL LETTER SIOS-TIKEUT ++317D ; mapped ; 1132 # 1.1 HANGUL LETTER SIOS-PIEUP ++317E ; mapped ; 1136 # 1.1 HANGUL LETTER SIOS-CIEUC ++317F ; mapped ; 1140 # 1.1 HANGUL LETTER PANSIOS ++3180 ; mapped ; 1147 # 1.1 HANGUL LETTER SSANGIEUNG ++3181 ; mapped ; 114C # 1.1 HANGUL LETTER YESIEUNG ++3182 ; mapped ; 11F1 # 1.1 HANGUL LETTER YESIEUNG-SIOS ++3183 ; mapped ; 11F2 # 1.1 HANGUL LETTER YESIEUNG-PANSIOS ++3184 ; mapped ; 1157 # 1.1 HANGUL LETTER KAPYEOUNPHIEUPH ++3185 ; mapped ; 1158 # 1.1 HANGUL LETTER SSANGHIEUH ++3186 ; mapped ; 1159 # 1.1 HANGUL LETTER YEORINHIEUH ++3187 ; mapped ; 1184 # 1.1 HANGUL LETTER YO-YA ++3188 ; mapped ; 1185 # 1.1 HANGUL LETTER YO-YAE ++3189 ; mapped ; 1188 # 1.1 HANGUL LETTER YO-I ++318A ; mapped ; 1191 # 1.1 HANGUL LETTER YU-YEO ++318B ; mapped ; 1192 # 1.1 HANGUL LETTER YU-YE ++318C ; mapped ; 1194 # 1.1 HANGUL LETTER YU-I ++318D ; mapped ; 119E # 1.1 HANGUL LETTER ARAEA ++318E ; mapped ; 11A1 # 1.1 HANGUL LETTER ARAEAE ++318F ; disallowed # NA ++3190..3191 ; valid ; ; NV8 # 1.1 IDEOGRAPHIC ANNOTATION LINKING MARK..IDEOGRAPHIC ANNOTATION REVERSE MARK ++3192 ; mapped ; 4E00 # 1.1 IDEOGRAPHIC ANNOTATION ONE MARK ++3193 ; mapped ; 4E8C # 1.1 
IDEOGRAPHIC ANNOTATION TWO MARK ++3194 ; mapped ; 4E09 # 1.1 IDEOGRAPHIC ANNOTATION THREE MARK ++3195 ; mapped ; 56DB # 1.1 IDEOGRAPHIC ANNOTATION FOUR MARK ++3196 ; mapped ; 4E0A # 1.1 IDEOGRAPHIC ANNOTATION TOP MARK ++3197 ; mapped ; 4E2D # 1.1 IDEOGRAPHIC ANNOTATION MIDDLE MARK ++3198 ; mapped ; 4E0B # 1.1 IDEOGRAPHIC ANNOTATION BOTTOM MARK ++3199 ; mapped ; 7532 # 1.1 IDEOGRAPHIC ANNOTATION FIRST MARK ++319A ; mapped ; 4E59 # 1.1 IDEOGRAPHIC ANNOTATION SECOND MARK ++319B ; mapped ; 4E19 # 1.1 IDEOGRAPHIC ANNOTATION THIRD MARK ++319C ; mapped ; 4E01 # 1.1 IDEOGRAPHIC ANNOTATION FOURTH MARK ++319D ; mapped ; 5929 # 1.1 IDEOGRAPHIC ANNOTATION HEAVEN MARK ++319E ; mapped ; 5730 # 1.1 IDEOGRAPHIC ANNOTATION EARTH MARK ++319F ; mapped ; 4EBA # 1.1 IDEOGRAPHIC ANNOTATION MAN MARK ++31A0..31B7 ; valid # 3.0 BOPOMOFO LETTER BU..BOPOMOFO FINAL LETTER H ++31B8..31BA ; valid # 6.0 BOPOMOFO LETTER GH..BOPOMOFO LETTER ZY ++31BB..31BF ; disallowed # NA .. ++31C0..31CF ; valid ; ; NV8 # 4.1 CJK STROKE T..CJK STROKE N ++31D0..31E3 ; valid ; ; NV8 # 5.1 CJK STROKE H..CJK STROKE Q ++31E4..31EF ; disallowed # NA .. 
++31F0..31FF ; valid # 3.2 KATAKANA LETTER SMALL KU..KATAKANA LETTER SMALL RO ++3200 ; disallowed_STD3_mapped ; 0028 1100 0029 #1.1 PARENTHESIZED HANGUL KIYEOK ++3201 ; disallowed_STD3_mapped ; 0028 1102 0029 #1.1 PARENTHESIZED HANGUL NIEUN ++3202 ; disallowed_STD3_mapped ; 0028 1103 0029 #1.1 PARENTHESIZED HANGUL TIKEUT ++3203 ; disallowed_STD3_mapped ; 0028 1105 0029 #1.1 PARENTHESIZED HANGUL RIEUL ++3204 ; disallowed_STD3_mapped ; 0028 1106 0029 #1.1 PARENTHESIZED HANGUL MIEUM ++3205 ; disallowed_STD3_mapped ; 0028 1107 0029 #1.1 PARENTHESIZED HANGUL PIEUP ++3206 ; disallowed_STD3_mapped ; 0028 1109 0029 #1.1 PARENTHESIZED HANGUL SIOS ++3207 ; disallowed_STD3_mapped ; 0028 110B 0029 #1.1 PARENTHESIZED HANGUL IEUNG ++3208 ; disallowed_STD3_mapped ; 0028 110C 0029 #1.1 PARENTHESIZED HANGUL CIEUC ++3209 ; disallowed_STD3_mapped ; 0028 110E 0029 #1.1 PARENTHESIZED HANGUL CHIEUCH ++320A ; disallowed_STD3_mapped ; 0028 110F 0029 #1.1 PARENTHESIZED HANGUL KHIEUKH ++320B ; disallowed_STD3_mapped ; 0028 1110 0029 #1.1 PARENTHESIZED HANGUL THIEUTH ++320C ; disallowed_STD3_mapped ; 0028 1111 0029 #1.1 PARENTHESIZED HANGUL PHIEUPH ++320D ; disallowed_STD3_mapped ; 0028 1112 0029 #1.1 PARENTHESIZED HANGUL HIEUH ++320E ; disallowed_STD3_mapped ; 0028 AC00 0029 #1.1 PARENTHESIZED HANGUL KIYEOK A ++320F ; disallowed_STD3_mapped ; 0028 B098 0029 #1.1 PARENTHESIZED HANGUL NIEUN A ++3210 ; disallowed_STD3_mapped ; 0028 B2E4 0029 #1.1 PARENTHESIZED HANGUL TIKEUT A ++3211 ; disallowed_STD3_mapped ; 0028 B77C 0029 #1.1 PARENTHESIZED HANGUL RIEUL A ++3212 ; disallowed_STD3_mapped ; 0028 B9C8 0029 #1.1 PARENTHESIZED HANGUL MIEUM A ++3213 ; disallowed_STD3_mapped ; 0028 BC14 0029 #1.1 PARENTHESIZED HANGUL PIEUP A ++3214 ; disallowed_STD3_mapped ; 0028 C0AC 0029 #1.1 PARENTHESIZED HANGUL SIOS A ++3215 ; disallowed_STD3_mapped ; 0028 C544 0029 #1.1 PARENTHESIZED HANGUL IEUNG A ++3216 ; disallowed_STD3_mapped ; 0028 C790 0029 #1.1 PARENTHESIZED HANGUL CIEUC A ++3217 ; 
disallowed_STD3_mapped ; 0028 CC28 0029 #1.1 PARENTHESIZED HANGUL CHIEUCH A ++3218 ; disallowed_STD3_mapped ; 0028 CE74 0029 #1.1 PARENTHESIZED HANGUL KHIEUKH A ++3219 ; disallowed_STD3_mapped ; 0028 D0C0 0029 #1.1 PARENTHESIZED HANGUL THIEUTH A ++321A ; disallowed_STD3_mapped ; 0028 D30C 0029 #1.1 PARENTHESIZED HANGUL PHIEUPH A ++321B ; disallowed_STD3_mapped ; 0028 D558 0029 #1.1 PARENTHESIZED HANGUL HIEUH A ++321C ; disallowed_STD3_mapped ; 0028 C8FC 0029 #1.1 PARENTHESIZED HANGUL CIEUC U ++321D ; disallowed_STD3_mapped ; 0028 C624 C804 0029 #4.0 PARENTHESIZED KOREAN CHARACTER OJEON ++321E ; disallowed_STD3_mapped ; 0028 C624 D6C4 0029 #4.0 PARENTHESIZED KOREAN CHARACTER O HU ++321F ; disallowed # NA ++3220 ; disallowed_STD3_mapped ; 0028 4E00 0029 #1.1 PARENTHESIZED IDEOGRAPH ONE ++3221 ; disallowed_STD3_mapped ; 0028 4E8C 0029 #1.1 PARENTHESIZED IDEOGRAPH TWO ++3222 ; disallowed_STD3_mapped ; 0028 4E09 0029 #1.1 PARENTHESIZED IDEOGRAPH THREE ++3223 ; disallowed_STD3_mapped ; 0028 56DB 0029 #1.1 PARENTHESIZED IDEOGRAPH FOUR ++3224 ; disallowed_STD3_mapped ; 0028 4E94 0029 #1.1 PARENTHESIZED IDEOGRAPH FIVE ++3225 ; disallowed_STD3_mapped ; 0028 516D 0029 #1.1 PARENTHESIZED IDEOGRAPH SIX ++3226 ; disallowed_STD3_mapped ; 0028 4E03 0029 #1.1 PARENTHESIZED IDEOGRAPH SEVEN ++3227 ; disallowed_STD3_mapped ; 0028 516B 0029 #1.1 PARENTHESIZED IDEOGRAPH EIGHT ++3228 ; disallowed_STD3_mapped ; 0028 4E5D 0029 #1.1 PARENTHESIZED IDEOGRAPH NINE ++3229 ; disallowed_STD3_mapped ; 0028 5341 0029 #1.1 PARENTHESIZED IDEOGRAPH TEN ++322A ; disallowed_STD3_mapped ; 0028 6708 0029 #1.1 PARENTHESIZED IDEOGRAPH MOON ++322B ; disallowed_STD3_mapped ; 0028 706B 0029 #1.1 PARENTHESIZED IDEOGRAPH FIRE ++322C ; disallowed_STD3_mapped ; 0028 6C34 0029 #1.1 PARENTHESIZED IDEOGRAPH WATER ++322D ; disallowed_STD3_mapped ; 0028 6728 0029 #1.1 PARENTHESIZED IDEOGRAPH WOOD ++322E ; disallowed_STD3_mapped ; 0028 91D1 0029 #1.1 PARENTHESIZED IDEOGRAPH METAL ++322F ; disallowed_STD3_mapped ; 0028 
571F 0029 #1.1 PARENTHESIZED IDEOGRAPH EARTH ++3230 ; disallowed_STD3_mapped ; 0028 65E5 0029 #1.1 PARENTHESIZED IDEOGRAPH SUN ++3231 ; disallowed_STD3_mapped ; 0028 682A 0029 #1.1 PARENTHESIZED IDEOGRAPH STOCK ++3232 ; disallowed_STD3_mapped ; 0028 6709 0029 #1.1 PARENTHESIZED IDEOGRAPH HAVE ++3233 ; disallowed_STD3_mapped ; 0028 793E 0029 #1.1 PARENTHESIZED IDEOGRAPH SOCIETY ++3234 ; disallowed_STD3_mapped ; 0028 540D 0029 #1.1 PARENTHESIZED IDEOGRAPH NAME ++3235 ; disallowed_STD3_mapped ; 0028 7279 0029 #1.1 PARENTHESIZED IDEOGRAPH SPECIAL ++3236 ; disallowed_STD3_mapped ; 0028 8CA1 0029 #1.1 PARENTHESIZED IDEOGRAPH FINANCIAL ++3237 ; disallowed_STD3_mapped ; 0028 795D 0029 #1.1 PARENTHESIZED IDEOGRAPH CONGRATULATION ++3238 ; disallowed_STD3_mapped ; 0028 52B4 0029 #1.1 PARENTHESIZED IDEOGRAPH LABOR ++3239 ; disallowed_STD3_mapped ; 0028 4EE3 0029 #1.1 PARENTHESIZED IDEOGRAPH REPRESENT ++323A ; disallowed_STD3_mapped ; 0028 547C 0029 #1.1 PARENTHESIZED IDEOGRAPH CALL ++323B ; disallowed_STD3_mapped ; 0028 5B66 0029 #1.1 PARENTHESIZED IDEOGRAPH STUDY ++323C ; disallowed_STD3_mapped ; 0028 76E3 0029 #1.1 PARENTHESIZED IDEOGRAPH SUPERVISE ++323D ; disallowed_STD3_mapped ; 0028 4F01 0029 #1.1 PARENTHESIZED IDEOGRAPH ENTERPRISE ++323E ; disallowed_STD3_mapped ; 0028 8CC7 0029 #1.1 PARENTHESIZED IDEOGRAPH RESOURCE ++323F ; disallowed_STD3_mapped ; 0028 5354 0029 #1.1 PARENTHESIZED IDEOGRAPH ALLIANCE ++3240 ; disallowed_STD3_mapped ; 0028 796D 0029 #1.1 PARENTHESIZED IDEOGRAPH FESTIVAL ++3241 ; disallowed_STD3_mapped ; 0028 4F11 0029 #1.1 PARENTHESIZED IDEOGRAPH REST ++3242 ; disallowed_STD3_mapped ; 0028 81EA 0029 #1.1 PARENTHESIZED IDEOGRAPH SELF ++3243 ; disallowed_STD3_mapped ; 0028 81F3 0029 #1.1 PARENTHESIZED IDEOGRAPH REACH ++3244 ; mapped ; 554F # 5.2 CIRCLED IDEOGRAPH QUESTION ++3245 ; mapped ; 5E7C # 5.2 CIRCLED IDEOGRAPH KINDERGARTEN ++3246 ; mapped ; 6587 # 5.2 CIRCLED IDEOGRAPH SCHOOL ++3247 ; mapped ; 7B8F # 5.2 CIRCLED IDEOGRAPH KOTO ++3248..324F ; valid 
; ; NV8 # 5.2 CIRCLED NUMBER TEN ON BLACK SQUARE..CIRCLED NUMBER EIGHTY ON BLACK SQUARE ++3250 ; mapped ; 0070 0074 0065 #4.0 PARTNERSHIP SIGN ++3251 ; mapped ; 0032 0031 # 3.2 CIRCLED NUMBER TWENTY ONE ++3252 ; mapped ; 0032 0032 # 3.2 CIRCLED NUMBER TWENTY TWO ++3253 ; mapped ; 0032 0033 # 3.2 CIRCLED NUMBER TWENTY THREE ++3254 ; mapped ; 0032 0034 # 3.2 CIRCLED NUMBER TWENTY FOUR ++3255 ; mapped ; 0032 0035 # 3.2 CIRCLED NUMBER TWENTY FIVE ++3256 ; mapped ; 0032 0036 # 3.2 CIRCLED NUMBER TWENTY SIX ++3257 ; mapped ; 0032 0037 # 3.2 CIRCLED NUMBER TWENTY SEVEN ++3258 ; mapped ; 0032 0038 # 3.2 CIRCLED NUMBER TWENTY EIGHT ++3259 ; mapped ; 0032 0039 # 3.2 CIRCLED NUMBER TWENTY NINE ++325A ; mapped ; 0033 0030 # 3.2 CIRCLED NUMBER THIRTY ++325B ; mapped ; 0033 0031 # 3.2 CIRCLED NUMBER THIRTY ONE ++325C ; mapped ; 0033 0032 # 3.2 CIRCLED NUMBER THIRTY TWO ++325D ; mapped ; 0033 0033 # 3.2 CIRCLED NUMBER THIRTY THREE ++325E ; mapped ; 0033 0034 # 3.2 CIRCLED NUMBER THIRTY FOUR ++325F ; mapped ; 0033 0035 # 3.2 CIRCLED NUMBER THIRTY FIVE ++3260 ; mapped ; 1100 # 1.1 CIRCLED HANGUL KIYEOK ++3261 ; mapped ; 1102 # 1.1 CIRCLED HANGUL NIEUN ++3262 ; mapped ; 1103 # 1.1 CIRCLED HANGUL TIKEUT ++3263 ; mapped ; 1105 # 1.1 CIRCLED HANGUL RIEUL ++3264 ; mapped ; 1106 # 1.1 CIRCLED HANGUL MIEUM ++3265 ; mapped ; 1107 # 1.1 CIRCLED HANGUL PIEUP ++3266 ; mapped ; 1109 # 1.1 CIRCLED HANGUL SIOS ++3267 ; mapped ; 110B # 1.1 CIRCLED HANGUL IEUNG ++3268 ; mapped ; 110C # 1.1 CIRCLED HANGUL CIEUC ++3269 ; mapped ; 110E # 1.1 CIRCLED HANGUL CHIEUCH ++326A ; mapped ; 110F # 1.1 CIRCLED HANGUL KHIEUKH ++326B ; mapped ; 1110 # 1.1 CIRCLED HANGUL THIEUTH ++326C ; mapped ; 1111 # 1.1 CIRCLED HANGUL PHIEUPH ++326D ; mapped ; 1112 # 1.1 CIRCLED HANGUL HIEUH ++326E ; mapped ; AC00 # 1.1 CIRCLED HANGUL KIYEOK A ++326F ; mapped ; B098 # 1.1 CIRCLED HANGUL NIEUN A ++3270 ; mapped ; B2E4 # 1.1 CIRCLED HANGUL TIKEUT A ++3271 ; mapped ; B77C # 1.1 CIRCLED HANGUL RIEUL A ++3272 ; mapped ; B9C8 # 1.1 
CIRCLED HANGUL MIEUM A ++3273 ; mapped ; BC14 # 1.1 CIRCLED HANGUL PIEUP A ++3274 ; mapped ; C0AC # 1.1 CIRCLED HANGUL SIOS A ++3275 ; mapped ; C544 # 1.1 CIRCLED HANGUL IEUNG A ++3276 ; mapped ; C790 # 1.1 CIRCLED HANGUL CIEUC A ++3277 ; mapped ; CC28 # 1.1 CIRCLED HANGUL CHIEUCH A ++3278 ; mapped ; CE74 # 1.1 CIRCLED HANGUL KHIEUKH A ++3279 ; mapped ; D0C0 # 1.1 CIRCLED HANGUL THIEUTH A ++327A ; mapped ; D30C # 1.1 CIRCLED HANGUL PHIEUPH A ++327B ; mapped ; D558 # 1.1 CIRCLED HANGUL HIEUH A ++327C ; mapped ; CC38 ACE0 # 4.0 CIRCLED KOREAN CHARACTER CHAMKO ++327D ; mapped ; C8FC C758 # 4.0 CIRCLED KOREAN CHARACTER JUEUI ++327E ; mapped ; C6B0 # 4.1 CIRCLED HANGUL IEUNG U ++327F ; valid ; ; NV8 # 1.1 KOREAN STANDARD SYMBOL ++3280 ; mapped ; 4E00 # 1.1 CIRCLED IDEOGRAPH ONE ++3281 ; mapped ; 4E8C # 1.1 CIRCLED IDEOGRAPH TWO ++3282 ; mapped ; 4E09 # 1.1 CIRCLED IDEOGRAPH THREE ++3283 ; mapped ; 56DB # 1.1 CIRCLED IDEOGRAPH FOUR ++3284 ; mapped ; 4E94 # 1.1 CIRCLED IDEOGRAPH FIVE ++3285 ; mapped ; 516D # 1.1 CIRCLED IDEOGRAPH SIX ++3286 ; mapped ; 4E03 # 1.1 CIRCLED IDEOGRAPH SEVEN ++3287 ; mapped ; 516B # 1.1 CIRCLED IDEOGRAPH EIGHT ++3288 ; mapped ; 4E5D # 1.1 CIRCLED IDEOGRAPH NINE ++3289 ; mapped ; 5341 # 1.1 CIRCLED IDEOGRAPH TEN ++328A ; mapped ; 6708 # 1.1 CIRCLED IDEOGRAPH MOON ++328B ; mapped ; 706B # 1.1 CIRCLED IDEOGRAPH FIRE ++328C ; mapped ; 6C34 # 1.1 CIRCLED IDEOGRAPH WATER ++328D ; mapped ; 6728 # 1.1 CIRCLED IDEOGRAPH WOOD ++328E ; mapped ; 91D1 # 1.1 CIRCLED IDEOGRAPH METAL ++328F ; mapped ; 571F # 1.1 CIRCLED IDEOGRAPH EARTH ++3290 ; mapped ; 65E5 # 1.1 CIRCLED IDEOGRAPH SUN ++3291 ; mapped ; 682A # 1.1 CIRCLED IDEOGRAPH STOCK ++3292 ; mapped ; 6709 # 1.1 CIRCLED IDEOGRAPH HAVE ++3293 ; mapped ; 793E # 1.1 CIRCLED IDEOGRAPH SOCIETY ++3294 ; mapped ; 540D # 1.1 CIRCLED IDEOGRAPH NAME ++3295 ; mapped ; 7279 # 1.1 CIRCLED IDEOGRAPH SPECIAL ++3296 ; mapped ; 8CA1 # 1.1 CIRCLED IDEOGRAPH FINANCIAL ++3297 ; mapped ; 795D # 1.1 CIRCLED IDEOGRAPH 
CONGRATULATION ++3298 ; mapped ; 52B4 # 1.1 CIRCLED IDEOGRAPH LABOR ++3299 ; mapped ; 79D8 # 1.1 CIRCLED IDEOGRAPH SECRET ++329A ; mapped ; 7537 # 1.1 CIRCLED IDEOGRAPH MALE ++329B ; mapped ; 5973 # 1.1 CIRCLED IDEOGRAPH FEMALE ++329C ; mapped ; 9069 # 1.1 CIRCLED IDEOGRAPH SUITABLE ++329D ; mapped ; 512A # 1.1 CIRCLED IDEOGRAPH EXCELLENT ++329E ; mapped ; 5370 # 1.1 CIRCLED IDEOGRAPH PRINT ++329F ; mapped ; 6CE8 # 1.1 CIRCLED IDEOGRAPH ATTENTION ++32A0 ; mapped ; 9805 # 1.1 CIRCLED IDEOGRAPH ITEM ++32A1 ; mapped ; 4F11 # 1.1 CIRCLED IDEOGRAPH REST ++32A2 ; mapped ; 5199 # 1.1 CIRCLED IDEOGRAPH COPY ++32A3 ; mapped ; 6B63 # 1.1 CIRCLED IDEOGRAPH CORRECT ++32A4 ; mapped ; 4E0A # 1.1 CIRCLED IDEOGRAPH HIGH ++32A5 ; mapped ; 4E2D # 1.1 CIRCLED IDEOGRAPH CENTRE ++32A6 ; mapped ; 4E0B # 1.1 CIRCLED IDEOGRAPH LOW ++32A7 ; mapped ; 5DE6 # 1.1 CIRCLED IDEOGRAPH LEFT ++32A8 ; mapped ; 53F3 # 1.1 CIRCLED IDEOGRAPH RIGHT ++32A9 ; mapped ; 533B # 1.1 CIRCLED IDEOGRAPH MEDICINE ++32AA ; mapped ; 5B97 # 1.1 CIRCLED IDEOGRAPH RELIGION ++32AB ; mapped ; 5B66 # 1.1 CIRCLED IDEOGRAPH STUDY ++32AC ; mapped ; 76E3 # 1.1 CIRCLED IDEOGRAPH SUPERVISE ++32AD ; mapped ; 4F01 # 1.1 CIRCLED IDEOGRAPH ENTERPRISE ++32AE ; mapped ; 8CC7 # 1.1 CIRCLED IDEOGRAPH RESOURCE ++32AF ; mapped ; 5354 # 1.1 CIRCLED IDEOGRAPH ALLIANCE ++32B0 ; mapped ; 591C # 1.1 CIRCLED IDEOGRAPH NIGHT ++32B1 ; mapped ; 0033 0036 # 3.2 CIRCLED NUMBER THIRTY SIX ++32B2 ; mapped ; 0033 0037 # 3.2 CIRCLED NUMBER THIRTY SEVEN ++32B3 ; mapped ; 0033 0038 # 3.2 CIRCLED NUMBER THIRTY EIGHT ++32B4 ; mapped ; 0033 0039 # 3.2 CIRCLED NUMBER THIRTY NINE ++32B5 ; mapped ; 0034 0030 # 3.2 CIRCLED NUMBER FORTY ++32B6 ; mapped ; 0034 0031 # 3.2 CIRCLED NUMBER FORTY ONE ++32B7 ; mapped ; 0034 0032 # 3.2 CIRCLED NUMBER FORTY TWO ++32B8 ; mapped ; 0034 0033 # 3.2 CIRCLED NUMBER FORTY THREE ++32B9 ; mapped ; 0034 0034 # 3.2 CIRCLED NUMBER FORTY FOUR ++32BA ; mapped ; 0034 0035 # 3.2 CIRCLED NUMBER FORTY FIVE ++32BB ; mapped ; 0034 0036 # 
3.2 CIRCLED NUMBER FORTY SIX ++32BC ; mapped ; 0034 0037 # 3.2 CIRCLED NUMBER FORTY SEVEN ++32BD ; mapped ; 0034 0038 # 3.2 CIRCLED NUMBER FORTY EIGHT ++32BE ; mapped ; 0034 0039 # 3.2 CIRCLED NUMBER FORTY NINE ++32BF ; mapped ; 0035 0030 # 3.2 CIRCLED NUMBER FIFTY ++32C0 ; mapped ; 0031 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR JANUARY ++32C1 ; mapped ; 0032 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR FEBRUARY ++32C2 ; mapped ; 0033 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR MARCH ++32C3 ; mapped ; 0034 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR APRIL ++32C4 ; mapped ; 0035 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR MAY ++32C5 ; mapped ; 0036 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR JUNE ++32C6 ; mapped ; 0037 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR JULY ++32C7 ; mapped ; 0038 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR AUGUST ++32C8 ; mapped ; 0039 6708 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR SEPTEMBER ++32C9 ; mapped ; 0031 0030 6708 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR OCTOBER ++32CA ; mapped ; 0031 0031 6708 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR NOVEMBER ++32CB ; mapped ; 0031 0032 6708 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DECEMBER ++32CC ; mapped ; 0068 0067 # 4.0 SQUARE HG ++32CD ; mapped ; 0065 0072 0067 #4.0 SQUARE ERG ++32CE ; mapped ; 0065 0076 # 4.0 SQUARE EV ++32CF ; mapped ; 006C 0074 0064 #4.0 LIMITED LIABILITY SIGN ++32D0 ; mapped ; 30A2 # 1.1 CIRCLED KATAKANA A ++32D1 ; mapped ; 30A4 # 1.1 CIRCLED KATAKANA I ++32D2 ; mapped ; 30A6 # 1.1 CIRCLED KATAKANA U ++32D3 ; mapped ; 30A8 # 1.1 CIRCLED KATAKANA E ++32D4 ; mapped ; 30AA # 1.1 CIRCLED KATAKANA O ++32D5 ; mapped ; 30AB # 1.1 CIRCLED KATAKANA KA ++32D6 ; mapped ; 30AD # 1.1 CIRCLED KATAKANA KI ++32D7 ; mapped ; 30AF # 1.1 CIRCLED KATAKANA KU ++32D8 ; mapped ; 30B1 # 1.1 CIRCLED KATAKANA KE ++32D9 ; mapped ; 30B3 # 1.1 CIRCLED KATAKANA KO ++32DA ; mapped ; 30B5 # 1.1 CIRCLED KATAKANA SA ++32DB ; mapped ; 30B7 # 1.1 CIRCLED KATAKANA SI ++32DC ; mapped ; 30B9 # 1.1 CIRCLED KATAKANA SU 
++32DD ; mapped ; 30BB # 1.1 CIRCLED KATAKANA SE ++32DE ; mapped ; 30BD # 1.1 CIRCLED KATAKANA SO ++32DF ; mapped ; 30BF # 1.1 CIRCLED KATAKANA TA ++32E0 ; mapped ; 30C1 # 1.1 CIRCLED KATAKANA TI ++32E1 ; mapped ; 30C4 # 1.1 CIRCLED KATAKANA TU ++32E2 ; mapped ; 30C6 # 1.1 CIRCLED KATAKANA TE ++32E3 ; mapped ; 30C8 # 1.1 CIRCLED KATAKANA TO ++32E4 ; mapped ; 30CA # 1.1 CIRCLED KATAKANA NA ++32E5 ; mapped ; 30CB # 1.1 CIRCLED KATAKANA NI ++32E6 ; mapped ; 30CC # 1.1 CIRCLED KATAKANA NU ++32E7 ; mapped ; 30CD # 1.1 CIRCLED KATAKANA NE ++32E8 ; mapped ; 30CE # 1.1 CIRCLED KATAKANA NO ++32E9 ; mapped ; 30CF # 1.1 CIRCLED KATAKANA HA ++32EA ; mapped ; 30D2 # 1.1 CIRCLED KATAKANA HI ++32EB ; mapped ; 30D5 # 1.1 CIRCLED KATAKANA HU ++32EC ; mapped ; 30D8 # 1.1 CIRCLED KATAKANA HE ++32ED ; mapped ; 30DB # 1.1 CIRCLED KATAKANA HO ++32EE ; mapped ; 30DE # 1.1 CIRCLED KATAKANA MA ++32EF ; mapped ; 30DF # 1.1 CIRCLED KATAKANA MI ++32F0 ; mapped ; 30E0 # 1.1 CIRCLED KATAKANA MU ++32F1 ; mapped ; 30E1 # 1.1 CIRCLED KATAKANA ME ++32F2 ; mapped ; 30E2 # 1.1 CIRCLED KATAKANA MO ++32F3 ; mapped ; 30E4 # 1.1 CIRCLED KATAKANA YA ++32F4 ; mapped ; 30E6 # 1.1 CIRCLED KATAKANA YU ++32F5 ; mapped ; 30E8 # 1.1 CIRCLED KATAKANA YO ++32F6 ; mapped ; 30E9 # 1.1 CIRCLED KATAKANA RA ++32F7 ; mapped ; 30EA # 1.1 CIRCLED KATAKANA RI ++32F8 ; mapped ; 30EB # 1.1 CIRCLED KATAKANA RU ++32F9 ; mapped ; 30EC # 1.1 CIRCLED KATAKANA RE ++32FA ; mapped ; 30ED # 1.1 CIRCLED KATAKANA RO ++32FB ; mapped ; 30EF # 1.1 CIRCLED KATAKANA WA ++32FC ; mapped ; 30F0 # 1.1 CIRCLED KATAKANA WI ++32FD ; mapped ; 30F1 # 1.1 CIRCLED KATAKANA WE ++32FE ; mapped ; 30F2 # 1.1 CIRCLED KATAKANA WO ++32FF ; disallowed # NA ++3300 ; mapped ; 30A2 30D1 30FC 30C8 #1.1 SQUARE APAATO ++3301 ; mapped ; 30A2 30EB 30D5 30A1 #1.1 SQUARE ARUHUA ++3302 ; mapped ; 30A2 30F3 30DA 30A2 #1.1 SQUARE ANPEA ++3303 ; mapped ; 30A2 30FC 30EB #1.1 SQUARE AARU ++3304 ; mapped ; 30A4 30CB 30F3 30B0 #1.1 SQUARE ININGU ++3305 ; mapped ; 30A4 30F3 
30C1 #1.1 SQUARE INTI ++3306 ; mapped ; 30A6 30A9 30F3 #1.1 SQUARE UON ++3307 ; mapped ; 30A8 30B9 30AF 30FC 30C9 #1.1 SQUARE ESUKUUDO ++3308 ; mapped ; 30A8 30FC 30AB 30FC #1.1 SQUARE EEKAA ++3309 ; mapped ; 30AA 30F3 30B9 #1.1 SQUARE ONSU ++330A ; mapped ; 30AA 30FC 30E0 #1.1 SQUARE OOMU ++330B ; mapped ; 30AB 30A4 30EA #1.1 SQUARE KAIRI ++330C ; mapped ; 30AB 30E9 30C3 30C8 #1.1 SQUARE KARATTO ++330D ; mapped ; 30AB 30ED 30EA 30FC #1.1 SQUARE KARORII ++330E ; mapped ; 30AC 30ED 30F3 #1.1 SQUARE GARON ++330F ; mapped ; 30AC 30F3 30DE #1.1 SQUARE GANMA ++3310 ; mapped ; 30AE 30AC # 1.1 SQUARE GIGA ++3311 ; mapped ; 30AE 30CB 30FC #1.1 SQUARE GINII ++3312 ; mapped ; 30AD 30E5 30EA 30FC #1.1 SQUARE KYURII ++3313 ; mapped ; 30AE 30EB 30C0 30FC #1.1 SQUARE GIRUDAA ++3314 ; mapped ; 30AD 30ED # 1.1 SQUARE KIRO ++3315 ; mapped ; 30AD 30ED 30B0 30E9 30E0 #1.1 SQUARE KIROGURAMU ++3316 ; mapped ; 30AD 30ED 30E1 30FC 30C8 30EB #1.1 SQUARE KIROMEETORU ++3317 ; mapped ; 30AD 30ED 30EF 30C3 30C8 #1.1 SQUARE KIROWATTO ++3318 ; mapped ; 30B0 30E9 30E0 #1.1 SQUARE GURAMU ++3319 ; mapped ; 30B0 30E9 30E0 30C8 30F3 #1.1 SQUARE GURAMUTON ++331A ; mapped ; 30AF 30EB 30BC 30A4 30ED #1.1 SQUARE KURUZEIRO ++331B ; mapped ; 30AF 30ED 30FC 30CD #1.1 SQUARE KUROONE ++331C ; mapped ; 30B1 30FC 30B9 #1.1 SQUARE KEESU ++331D ; mapped ; 30B3 30EB 30CA #1.1 SQUARE KORUNA ++331E ; mapped ; 30B3 30FC 30DD #1.1 SQUARE KOOPO ++331F ; mapped ; 30B5 30A4 30AF 30EB #1.1 SQUARE SAIKURU ++3320 ; mapped ; 30B5 30F3 30C1 30FC 30E0 #1.1 SQUARE SANTIIMU ++3321 ; mapped ; 30B7 30EA 30F3 30B0 #1.1 SQUARE SIRINGU ++3322 ; mapped ; 30BB 30F3 30C1 #1.1 SQUARE SENTI ++3323 ; mapped ; 30BB 30F3 30C8 #1.1 SQUARE SENTO ++3324 ; mapped ; 30C0 30FC 30B9 #1.1 SQUARE DAASU ++3325 ; mapped ; 30C7 30B7 # 1.1 SQUARE DESI ++3326 ; mapped ; 30C9 30EB # 1.1 SQUARE DORU ++3327 ; mapped ; 30C8 30F3 # 1.1 SQUARE TON ++3328 ; mapped ; 30CA 30CE # 1.1 SQUARE NANO ++3329 ; mapped ; 30CE 30C3 30C8 #1.1 SQUARE NOTTO ++332A ; mapped ; 
30CF 30A4 30C4 #1.1 SQUARE HAITU ++332B ; mapped ; 30D1 30FC 30BB 30F3 30C8 #1.1 SQUARE PAASENTO ++332C ; mapped ; 30D1 30FC 30C4 #1.1 SQUARE PAATU ++332D ; mapped ; 30D0 30FC 30EC 30EB #1.1 SQUARE BAARERU ++332E ; mapped ; 30D4 30A2 30B9 30C8 30EB #1.1 SQUARE PIASUTORU ++332F ; mapped ; 30D4 30AF 30EB #1.1 SQUARE PIKURU ++3330 ; mapped ; 30D4 30B3 # 1.1 SQUARE PIKO ++3331 ; mapped ; 30D3 30EB # 1.1 SQUARE BIRU ++3332 ; mapped ; 30D5 30A1 30E9 30C3 30C9 #1.1 SQUARE HUARADDO ++3333 ; mapped ; 30D5 30A3 30FC 30C8 #1.1 SQUARE HUIITO ++3334 ; mapped ; 30D6 30C3 30B7 30A7 30EB #1.1 SQUARE BUSSYERU ++3335 ; mapped ; 30D5 30E9 30F3 #1.1 SQUARE HURAN ++3336 ; mapped ; 30D8 30AF 30BF 30FC 30EB #1.1 SQUARE HEKUTAARU ++3337 ; mapped ; 30DA 30BD # 1.1 SQUARE PESO ++3338 ; mapped ; 30DA 30CB 30D2 #1.1 SQUARE PENIHI ++3339 ; mapped ; 30D8 30EB 30C4 #1.1 SQUARE HERUTU ++333A ; mapped ; 30DA 30F3 30B9 #1.1 SQUARE PENSU ++333B ; mapped ; 30DA 30FC 30B8 #1.1 SQUARE PEEZI ++333C ; mapped ; 30D9 30FC 30BF #1.1 SQUARE BEETA ++333D ; mapped ; 30DD 30A4 30F3 30C8 #1.1 SQUARE POINTO ++333E ; mapped ; 30DC 30EB 30C8 #1.1 SQUARE BORUTO ++333F ; mapped ; 30DB 30F3 # 1.1 SQUARE HON ++3340 ; mapped ; 30DD 30F3 30C9 #1.1 SQUARE PONDO ++3341 ; mapped ; 30DB 30FC 30EB #1.1 SQUARE HOORU ++3342 ; mapped ; 30DB 30FC 30F3 #1.1 SQUARE HOON ++3343 ; mapped ; 30DE 30A4 30AF 30ED #1.1 SQUARE MAIKURO ++3344 ; mapped ; 30DE 30A4 30EB #1.1 SQUARE MAIRU ++3345 ; mapped ; 30DE 30C3 30CF #1.1 SQUARE MAHHA ++3346 ; mapped ; 30DE 30EB 30AF #1.1 SQUARE MARUKU ++3347 ; mapped ; 30DE 30F3 30B7 30E7 30F3 #1.1 SQUARE MANSYON ++3348 ; mapped ; 30DF 30AF 30ED 30F3 #1.1 SQUARE MIKURON ++3349 ; mapped ; 30DF 30EA # 1.1 SQUARE MIRI ++334A ; mapped ; 30DF 30EA 30D0 30FC 30EB #1.1 SQUARE MIRIBAARU ++334B ; mapped ; 30E1 30AC # 1.1 SQUARE MEGA ++334C ; mapped ; 30E1 30AC 30C8 30F3 #1.1 SQUARE MEGATON ++334D ; mapped ; 30E1 30FC 30C8 30EB #1.1 SQUARE MEETORU ++334E ; mapped ; 30E4 30FC 30C9 #1.1 SQUARE YAADO ++334F ; mapped ; 
30E4 30FC 30EB #1.1 SQUARE YAARU ++3350 ; mapped ; 30E6 30A2 30F3 #1.1 SQUARE YUAN ++3351 ; mapped ; 30EA 30C3 30C8 30EB #1.1 SQUARE RITTORU ++3352 ; mapped ; 30EA 30E9 # 1.1 SQUARE RIRA ++3353 ; mapped ; 30EB 30D4 30FC #1.1 SQUARE RUPII ++3354 ; mapped ; 30EB 30FC 30D6 30EB #1.1 SQUARE RUUBURU ++3355 ; mapped ; 30EC 30E0 # 1.1 SQUARE REMU ++3356 ; mapped ; 30EC 30F3 30C8 30B2 30F3 #1.1 SQUARE RENTOGEN ++3357 ; mapped ; 30EF 30C3 30C8 #1.1 SQUARE WATTO ++3358 ; mapped ; 0030 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ZERO ++3359 ; mapped ; 0031 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ONE ++335A ; mapped ; 0032 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWO ++335B ; mapped ; 0033 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR THREE ++335C ; mapped ; 0034 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FOUR ++335D ; mapped ; 0035 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FIVE ++335E ; mapped ; 0036 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SIX ++335F ; mapped ; 0037 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SEVEN ++3360 ; mapped ; 0038 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR EIGHT ++3361 ; mapped ; 0039 70B9 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR NINE ++3362 ; mapped ; 0031 0030 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TEN ++3363 ; mapped ; 0031 0031 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR ELEVEN ++3364 ; mapped ; 0031 0032 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWELVE ++3365 ; mapped ; 0031 0033 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR THIRTEEN ++3366 ; mapped ; 0031 0034 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FOURTEEN ++3367 ; mapped ; 0031 0035 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR FIFTEEN ++3368 ; mapped ; 0031 0036 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SIXTEEN ++3369 ; mapped ; 0031 0037 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR SEVENTEEN ++336A ; mapped ; 0031 0038 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR EIGHTEEN ++336B ; mapped ; 
0031 0039 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR NINETEEN ++336C ; mapped ; 0032 0030 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY ++336D ; mapped ; 0032 0031 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-ONE ++336E ; mapped ; 0032 0032 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-TWO ++336F ; mapped ; 0032 0033 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-THREE ++3370 ; mapped ; 0032 0034 70B9 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR HOUR TWENTY-FOUR ++3371 ; mapped ; 0068 0070 0061 #1.1 SQUARE HPA ++3372 ; mapped ; 0064 0061 # 1.1 SQUARE DA ++3373 ; mapped ; 0061 0075 # 1.1 SQUARE AU ++3374 ; mapped ; 0062 0061 0072 #1.1 SQUARE BAR ++3375 ; mapped ; 006F 0076 # 1.1 SQUARE OV ++3376 ; mapped ; 0070 0063 # 1.1 SQUARE PC ++3377 ; mapped ; 0064 006D # 4.0 SQUARE DM ++3378 ; mapped ; 0064 006D 0032 #4.0 SQUARE DM SQUARED ++3379 ; mapped ; 0064 006D 0033 #4.0 SQUARE DM CUBED ++337A ; mapped ; 0069 0075 # 4.0 SQUARE IU ++337B ; mapped ; 5E73 6210 # 1.1 SQUARE ERA NAME HEISEI ++337C ; mapped ; 662D 548C # 1.1 SQUARE ERA NAME SYOUWA ++337D ; mapped ; 5927 6B63 # 1.1 SQUARE ERA NAME TAISYOU ++337E ; mapped ; 660E 6CBB # 1.1 SQUARE ERA NAME MEIZI ++337F ; mapped ; 682A 5F0F 4F1A 793E #1.1 SQUARE CORPORATION ++3380 ; mapped ; 0070 0061 # 1.1 SQUARE PA AMPS ++3381 ; mapped ; 006E 0061 # 1.1 SQUARE NA ++3382 ; mapped ; 03BC 0061 # 1.1 SQUARE MU A ++3383 ; mapped ; 006D 0061 # 1.1 SQUARE MA ++3384 ; mapped ; 006B 0061 # 1.1 SQUARE KA ++3385 ; mapped ; 006B 0062 # 1.1 SQUARE KB ++3386 ; mapped ; 006D 0062 # 1.1 SQUARE MB ++3387 ; mapped ; 0067 0062 # 1.1 SQUARE GB ++3388 ; mapped ; 0063 0061 006C #1.1 SQUARE CAL ++3389 ; mapped ; 006B 0063 0061 006C #1.1 SQUARE KCAL ++338A ; mapped ; 0070 0066 # 1.1 SQUARE PF ++338B ; mapped ; 006E 0066 # 1.1 SQUARE NF ++338C ; mapped ; 03BC 0066 # 1.1 SQUARE MU F ++338D ; mapped ; 03BC 0067 # 1.1 SQUARE MU G ++338E ; mapped ; 006D 0067 # 1.1 SQUARE MG ++338F ; mapped ; 006B 0067 # 1.1 SQUARE KG 
++3390 ; mapped ; 0068 007A # 1.1 SQUARE HZ ++3391 ; mapped ; 006B 0068 007A #1.1 SQUARE KHZ ++3392 ; mapped ; 006D 0068 007A #1.1 SQUARE MHZ ++3393 ; mapped ; 0067 0068 007A #1.1 SQUARE GHZ ++3394 ; mapped ; 0074 0068 007A #1.1 SQUARE THZ ++3395 ; mapped ; 03BC 006C # 1.1 SQUARE MU L ++3396 ; mapped ; 006D 006C # 1.1 SQUARE ML ++3397 ; mapped ; 0064 006C # 1.1 SQUARE DL ++3398 ; mapped ; 006B 006C # 1.1 SQUARE KL ++3399 ; mapped ; 0066 006D # 1.1 SQUARE FM ++339A ; mapped ; 006E 006D # 1.1 SQUARE NM ++339B ; mapped ; 03BC 006D # 1.1 SQUARE MU M ++339C ; mapped ; 006D 006D # 1.1 SQUARE MM ++339D ; mapped ; 0063 006D # 1.1 SQUARE CM ++339E ; mapped ; 006B 006D # 1.1 SQUARE KM ++339F ; mapped ; 006D 006D 0032 #1.1 SQUARE MM SQUARED ++33A0 ; mapped ; 0063 006D 0032 #1.1 SQUARE CM SQUARED ++33A1 ; mapped ; 006D 0032 # 1.1 SQUARE M SQUARED ++33A2 ; mapped ; 006B 006D 0032 #1.1 SQUARE KM SQUARED ++33A3 ; mapped ; 006D 006D 0033 #1.1 SQUARE MM CUBED ++33A4 ; mapped ; 0063 006D 0033 #1.1 SQUARE CM CUBED ++33A5 ; mapped ; 006D 0033 # 1.1 SQUARE M CUBED ++33A6 ; mapped ; 006B 006D 0033 #1.1 SQUARE KM CUBED ++33A7 ; mapped ; 006D 2215 0073 #1.1 SQUARE M OVER S ++33A8 ; mapped ; 006D 2215 0073 0032 #1.1 SQUARE M OVER S SQUARED ++33A9 ; mapped ; 0070 0061 # 1.1 SQUARE PA ++33AA ; mapped ; 006B 0070 0061 #1.1 SQUARE KPA ++33AB ; mapped ; 006D 0070 0061 #1.1 SQUARE MPA ++33AC ; mapped ; 0067 0070 0061 #1.1 SQUARE GPA ++33AD ; mapped ; 0072 0061 0064 #1.1 SQUARE RAD ++33AE ; mapped ; 0072 0061 0064 2215 0073 #1.1 SQUARE RAD OVER S ++33AF ; mapped ; 0072 0061 0064 2215 0073 0032 #1.1 SQUARE RAD OVER S SQUARED ++33B0 ; mapped ; 0070 0073 # 1.1 SQUARE PS ++33B1 ; mapped ; 006E 0073 # 1.1 SQUARE NS ++33B2 ; mapped ; 03BC 0073 # 1.1 SQUARE MU S ++33B3 ; mapped ; 006D 0073 # 1.1 SQUARE MS ++33B4 ; mapped ; 0070 0076 # 1.1 SQUARE PV ++33B5 ; mapped ; 006E 0076 # 1.1 SQUARE NV ++33B6 ; mapped ; 03BC 0076 # 1.1 SQUARE MU V ++33B7 ; mapped ; 006D 0076 # 1.1 SQUARE MV ++33B8 ; mapped ; 006B 
0076 # 1.1 SQUARE KV ++33B9 ; mapped ; 006D 0076 # 1.1 SQUARE MV MEGA ++33BA ; mapped ; 0070 0077 # 1.1 SQUARE PW ++33BB ; mapped ; 006E 0077 # 1.1 SQUARE NW ++33BC ; mapped ; 03BC 0077 # 1.1 SQUARE MU W ++33BD ; mapped ; 006D 0077 # 1.1 SQUARE MW ++33BE ; mapped ; 006B 0077 # 1.1 SQUARE KW ++33BF ; mapped ; 006D 0077 # 1.1 SQUARE MW MEGA ++33C0 ; mapped ; 006B 03C9 # 1.1 SQUARE K OHM ++33C1 ; mapped ; 006D 03C9 # 1.1 SQUARE M OHM ++33C2 ; disallowed # 1.1 SQUARE AM ++33C3 ; mapped ; 0062 0071 # 1.1 SQUARE BQ ++33C4 ; mapped ; 0063 0063 # 1.1 SQUARE CC ++33C5 ; mapped ; 0063 0064 # 1.1 SQUARE CD ++33C6 ; mapped ; 0063 2215 006B 0067 #1.1 SQUARE C OVER KG ++33C7 ; disallowed # 1.1 SQUARE CO ++33C8 ; mapped ; 0064 0062 # 1.1 SQUARE DB ++33C9 ; mapped ; 0067 0079 # 1.1 SQUARE GY ++33CA ; mapped ; 0068 0061 # 1.1 SQUARE HA ++33CB ; mapped ; 0068 0070 # 1.1 SQUARE HP ++33CC ; mapped ; 0069 006E # 1.1 SQUARE IN ++33CD ; mapped ; 006B 006B # 1.1 SQUARE KK ++33CE ; mapped ; 006B 006D # 1.1 SQUARE KM CAPITAL ++33CF ; mapped ; 006B 0074 # 1.1 SQUARE KT ++33D0 ; mapped ; 006C 006D # 1.1 SQUARE LM ++33D1 ; mapped ; 006C 006E # 1.1 SQUARE LN ++33D2 ; mapped ; 006C 006F 0067 #1.1 SQUARE LOG ++33D3 ; mapped ; 006C 0078 # 1.1 SQUARE LX ++33D4 ; mapped ; 006D 0062 # 1.1 SQUARE MB SMALL ++33D5 ; mapped ; 006D 0069 006C #1.1 SQUARE MIL ++33D6 ; mapped ; 006D 006F 006C #1.1 SQUARE MOL ++33D7 ; mapped ; 0070 0068 # 1.1 SQUARE PH ++33D8 ; disallowed # 1.1 SQUARE PM ++33D9 ; mapped ; 0070 0070 006D #1.1 SQUARE PPM ++33DA ; mapped ; 0070 0072 # 1.1 SQUARE PR ++33DB ; mapped ; 0073 0072 # 1.1 SQUARE SR ++33DC ; mapped ; 0073 0076 # 1.1 SQUARE SV ++33DD ; mapped ; 0077 0062 # 1.1 SQUARE WB ++33DE ; mapped ; 0076 2215 006D #4.0 SQUARE V OVER M ++33DF ; mapped ; 0061 2215 006D #4.0 SQUARE A OVER M ++33E0 ; mapped ; 0031 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY ONE ++33E1 ; mapped ; 0032 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWO ++33E2 ; mapped ; 0033 65E5 # 1.1 IDEOGRAPHIC 
TELEGRAPH SYMBOL FOR DAY THREE ++33E3 ; mapped ; 0034 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FOUR ++33E4 ; mapped ; 0035 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FIVE ++33E5 ; mapped ; 0036 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SIX ++33E6 ; mapped ; 0037 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SEVEN ++33E7 ; mapped ; 0038 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY EIGHT ++33E8 ; mapped ; 0039 65E5 # 1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY NINE ++33E9 ; mapped ; 0031 0030 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TEN ++33EA ; mapped ; 0031 0031 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY ELEVEN ++33EB ; mapped ; 0031 0032 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWELVE ++33EC ; mapped ; 0031 0033 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THIRTEEN ++33ED ; mapped ; 0031 0034 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FOURTEEN ++33EE ; mapped ; 0031 0035 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY FIFTEEN ++33EF ; mapped ; 0031 0036 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SIXTEEN ++33F0 ; mapped ; 0031 0037 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY SEVENTEEN ++33F1 ; mapped ; 0031 0038 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY EIGHTEEN ++33F2 ; mapped ; 0031 0039 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY NINETEEN ++33F3 ; mapped ; 0032 0030 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY ++33F4 ; mapped ; 0032 0031 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-ONE ++33F5 ; mapped ; 0032 0032 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-TWO ++33F6 ; mapped ; 0032 0033 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-THREE ++33F7 ; mapped ; 0032 0034 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-FOUR ++33F8 ; mapped ; 0032 0035 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-FIVE ++33F9 ; mapped ; 0032 0036 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-SIX ++33FA ; mapped ; 0032 0037 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY 
TWENTY-SEVEN ++33FB ; mapped ; 0032 0038 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-EIGHT ++33FC ; mapped ; 0032 0039 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY TWENTY-NINE ++33FD ; mapped ; 0033 0030 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THIRTY ++33FE ; mapped ; 0033 0031 65E5 #1.1 IDEOGRAPHIC TELEGRAPH SYMBOL FOR DAY THIRTY-ONE ++33FF ; mapped ; 0067 0061 006C #4.0 SQUARE GAL ++3400..4DB5 ; valid # 3.0 CJK UNIFIED IDEOGRAPH-3400..CJK UNIFIED IDEOGRAPH-4DB5 ++4DB6..4DBF ; disallowed # NA .. ++4DC0..4DFF ; valid ; ; NV8 # 4.0 HEXAGRAM FOR THE CREATIVE HEAVEN..HEXAGRAM FOR BEFORE COMPLETION ++4E00..9FA5 ; valid # 1.1 CJK UNIFIED IDEOGRAPH-4E00..CJK UNIFIED IDEOGRAPH-9FA5 ++9FA6..9FBB ; valid # 4.1 CJK UNIFIED IDEOGRAPH-9FA6..CJK UNIFIED IDEOGRAPH-9FBB ++9FBC..9FC3 ; valid # 5.1 CJK UNIFIED IDEOGRAPH-9FBC..CJK UNIFIED IDEOGRAPH-9FC3 ++9FC4..9FCB ; valid # 5.2 CJK UNIFIED IDEOGRAPH-9FC4..CJK UNIFIED IDEOGRAPH-9FCB ++9FCC ; valid # 6.1 CJK UNIFIED IDEOGRAPH-9FCC ++9FCD..9FD5 ; valid # 8.0 CJK UNIFIED IDEOGRAPH-9FCD..CJK UNIFIED IDEOGRAPH-9FD5 ++9FD6..9FEA ; valid # 10.0 CJK UNIFIED IDEOGRAPH-9FD6..CJK UNIFIED IDEOGRAPH-9FEA ++9FEB..9FFF ; disallowed # NA .. ++A000..A48C ; valid # 3.0 YI SYLLABLE IT..YI SYLLABLE YYR ++A48D..A48F ; disallowed # NA .. ++A490..A4A1 ; valid ; ; NV8 # 3.0 YI RADICAL QOT..YI RADICAL GA ++A4A2..A4A3 ; valid ; ; NV8 # 3.2 YI RADICAL ZUP..YI RADICAL CYT ++A4A4..A4B3 ; valid ; ; NV8 # 3.0 YI RADICAL DDUR..YI RADICAL JO ++A4B4 ; valid ; ; NV8 # 3.2 YI RADICAL NZUP ++A4B5..A4C0 ; valid ; ; NV8 # 3.0 YI RADICAL JJY..YI RADICAL SHAT ++A4C1 ; valid ; ; NV8 # 3.2 YI RADICAL ZUR ++A4C2..A4C4 ; valid ; ; NV8 # 3.0 YI RADICAL SHOP..YI RADICAL ZZIET ++A4C5 ; valid ; ; NV8 # 3.2 YI RADICAL NBIE ++A4C6 ; valid ; ; NV8 # 3.0 YI RADICAL KE ++A4C7..A4CF ; disallowed # NA .. 
++A4D0..A4FD ; valid # 5.2 LISU LETTER BA..LISU LETTER TONE MYA JEU ++A4FE..A4FF ; valid ; ; NV8 # 5.2 LISU PUNCTUATION COMMA..LISU PUNCTUATION FULL STOP ++A500..A60C ; valid # 5.1 VAI SYLLABLE EE..VAI SYLLABLE LENGTHENER ++A60D..A60F ; valid ; ; NV8 # 5.1 VAI COMMA..VAI QUESTION MARK ++A610..A62B ; valid # 5.1 VAI SYLLABLE NDOLE FA..VAI SYLLABLE NDOLE DO ++A62C..A63F ; disallowed # NA .. ++A640 ; mapped ; A641 # 5.1 CYRILLIC CAPITAL LETTER ZEMLYA ++A641 ; valid # 5.1 CYRILLIC SMALL LETTER ZEMLYA ++A642 ; mapped ; A643 # 5.1 CYRILLIC CAPITAL LETTER DZELO ++A643 ; valid # 5.1 CYRILLIC SMALL LETTER DZELO ++A644 ; mapped ; A645 # 5.1 CYRILLIC CAPITAL LETTER REVERSED DZE ++A645 ; valid # 5.1 CYRILLIC SMALL LETTER REVERSED DZE ++A646 ; mapped ; A647 # 5.1 CYRILLIC CAPITAL LETTER IOTA ++A647 ; valid # 5.1 CYRILLIC SMALL LETTER IOTA ++A648 ; mapped ; A649 # 5.1 CYRILLIC CAPITAL LETTER DJERV ++A649 ; valid # 5.1 CYRILLIC SMALL LETTER DJERV ++A64A ; mapped ; A64B # 5.1 CYRILLIC CAPITAL LETTER MONOGRAPH UK ++A64B ; valid # 5.1 CYRILLIC SMALL LETTER MONOGRAPH UK ++A64C ; mapped ; A64D # 5.1 CYRILLIC CAPITAL LETTER BROAD OMEGA ++A64D ; valid # 5.1 CYRILLIC SMALL LETTER BROAD OMEGA ++A64E ; mapped ; A64F # 5.1 CYRILLIC CAPITAL LETTER NEUTRAL YER ++A64F ; valid # 5.1 CYRILLIC SMALL LETTER NEUTRAL YER ++A650 ; mapped ; A651 # 5.1 CYRILLIC CAPITAL LETTER YERU WITH BACK YER ++A651 ; valid # 5.1 CYRILLIC SMALL LETTER YERU WITH BACK YER ++A652 ; mapped ; A653 # 5.1 CYRILLIC CAPITAL LETTER IOTIFIED YAT ++A653 ; valid # 5.1 CYRILLIC SMALL LETTER IOTIFIED YAT ++A654 ; mapped ; A655 # 5.1 CYRILLIC CAPITAL LETTER REVERSED YU ++A655 ; valid # 5.1 CYRILLIC SMALL LETTER REVERSED YU ++A656 ; mapped ; A657 # 5.1 CYRILLIC CAPITAL LETTER IOTIFIED A ++A657 ; valid # 5.1 CYRILLIC SMALL LETTER IOTIFIED A ++A658 ; mapped ; A659 # 5.1 CYRILLIC CAPITAL LETTER CLOSED LITTLE YUS ++A659 ; valid # 5.1 CYRILLIC SMALL LETTER CLOSED LITTLE YUS ++A65A ; mapped ; A65B # 5.1 CYRILLIC CAPITAL LETTER BLENDED YUS 
++A65B ; valid # 5.1 CYRILLIC SMALL LETTER BLENDED YUS ++A65C ; mapped ; A65D # 5.1 CYRILLIC CAPITAL LETTER IOTIFIED CLOSED LITTLE YUS ++A65D ; valid # 5.1 CYRILLIC SMALL LETTER IOTIFIED CLOSED LITTLE YUS ++A65E ; mapped ; A65F # 5.1 CYRILLIC CAPITAL LETTER YN ++A65F ; valid # 5.1 CYRILLIC SMALL LETTER YN ++A660 ; mapped ; A661 # 6.0 CYRILLIC CAPITAL LETTER REVERSED TSE ++A661 ; valid # 6.0 CYRILLIC SMALL LETTER REVERSED TSE ++A662 ; mapped ; A663 # 5.1 CYRILLIC CAPITAL LETTER SOFT DE ++A663 ; valid # 5.1 CYRILLIC SMALL LETTER SOFT DE ++A664 ; mapped ; A665 # 5.1 CYRILLIC CAPITAL LETTER SOFT EL ++A665 ; valid # 5.1 CYRILLIC SMALL LETTER SOFT EL ++A666 ; mapped ; A667 # 5.1 CYRILLIC CAPITAL LETTER SOFT EM ++A667 ; valid # 5.1 CYRILLIC SMALL LETTER SOFT EM ++A668 ; mapped ; A669 # 5.1 CYRILLIC CAPITAL LETTER MONOCULAR O ++A669 ; valid # 5.1 CYRILLIC SMALL LETTER MONOCULAR O ++A66A ; mapped ; A66B # 5.1 CYRILLIC CAPITAL LETTER BINOCULAR O ++A66B ; valid # 5.1 CYRILLIC SMALL LETTER BINOCULAR O ++A66C ; mapped ; A66D # 5.1 CYRILLIC CAPITAL LETTER DOUBLE MONOCULAR O ++A66D..A66F ; valid # 5.1 CYRILLIC SMALL LETTER DOUBLE MONOCULAR O..COMBINING CYRILLIC VZMET ++A670..A673 ; valid ; ; NV8 # 5.1 COMBINING CYRILLIC TEN MILLIONS SIGN..SLAVONIC ASTERISK ++A674..A67B ; valid # 6.1 COMBINING CYRILLIC LETTER UKRAINIAN IE..COMBINING CYRILLIC LETTER OMEGA ++A67C..A67D ; valid # 5.1 COMBINING CYRILLIC KAVYKA..COMBINING CYRILLIC PAYEROK ++A67E ; valid ; ; NV8 # 5.1 CYRILLIC KAVYKA ++A67F ; valid # 5.1 CYRILLIC PAYEROK ++A680 ; mapped ; A681 # 5.1 CYRILLIC CAPITAL LETTER DWE ++A681 ; valid # 5.1 CYRILLIC SMALL LETTER DWE ++A682 ; mapped ; A683 # 5.1 CYRILLIC CAPITAL LETTER DZWE ++A683 ; valid # 5.1 CYRILLIC SMALL LETTER DZWE ++A684 ; mapped ; A685 # 5.1 CYRILLIC CAPITAL LETTER ZHWE ++A685 ; valid # 5.1 CYRILLIC SMALL LETTER ZHWE ++A686 ; mapped ; A687 # 5.1 CYRILLIC CAPITAL LETTER CCHE ++A687 ; valid # 5.1 CYRILLIC SMALL LETTER CCHE ++A688 ; mapped ; A689 # 5.1 CYRILLIC CAPITAL LETTER 
DZZE ++A689 ; valid # 5.1 CYRILLIC SMALL LETTER DZZE ++A68A ; mapped ; A68B # 5.1 CYRILLIC CAPITAL LETTER TE WITH MIDDLE HOOK ++A68B ; valid # 5.1 CYRILLIC SMALL LETTER TE WITH MIDDLE HOOK ++A68C ; mapped ; A68D # 5.1 CYRILLIC CAPITAL LETTER TWE ++A68D ; valid # 5.1 CYRILLIC SMALL LETTER TWE ++A68E ; mapped ; A68F # 5.1 CYRILLIC CAPITAL LETTER TSWE ++A68F ; valid # 5.1 CYRILLIC SMALL LETTER TSWE ++A690 ; mapped ; A691 # 5.1 CYRILLIC CAPITAL LETTER TSSE ++A691 ; valid # 5.1 CYRILLIC SMALL LETTER TSSE ++A692 ; mapped ; A693 # 5.1 CYRILLIC CAPITAL LETTER TCHE ++A693 ; valid # 5.1 CYRILLIC SMALL LETTER TCHE ++A694 ; mapped ; A695 # 5.1 CYRILLIC CAPITAL LETTER HWE ++A695 ; valid # 5.1 CYRILLIC SMALL LETTER HWE ++A696 ; mapped ; A697 # 5.1 CYRILLIC CAPITAL LETTER SHWE ++A697 ; valid # 5.1 CYRILLIC SMALL LETTER SHWE ++A698 ; mapped ; A699 # 7.0 CYRILLIC CAPITAL LETTER DOUBLE O ++A699 ; valid # 7.0 CYRILLIC SMALL LETTER DOUBLE O ++A69A ; mapped ; A69B # 7.0 CYRILLIC CAPITAL LETTER CROSSED O ++A69B ; valid # 7.0 CYRILLIC SMALL LETTER CROSSED O ++A69C ; mapped ; 044A # 7.0 MODIFIER LETTER CYRILLIC HARD SIGN ++A69D ; mapped ; 044C # 7.0 MODIFIER LETTER CYRILLIC SOFT SIGN ++A69E ; valid # 8.0 COMBINING CYRILLIC LETTER EF ++A69F ; valid # 6.1 COMBINING CYRILLIC LETTER IOTIFIED E ++A6A0..A6E5 ; valid # 5.2 BAMUM LETTER A..BAMUM LETTER KI ++A6E6..A6EF ; valid ; ; NV8 # 5.2 BAMUM LETTER MO..BAMUM LETTER KOGHOM ++A6F0..A6F1 ; valid # 5.2 BAMUM COMBINING MARK KOQNDON..BAMUM COMBINING MARK TUKWENTIS ++A6F2..A6F7 ; valid ; ; NV8 # 5.2 BAMUM NJAEMLI..BAMUM QUESTION MARK ++A6F8..A6FF ; disallowed # NA .. 
++A700..A716 ; valid ; ; NV8 # 4.1 MODIFIER LETTER CHINESE TONE YIN PING..MODIFIER LETTER EXTRA-LOW LEFT-STEM TONE BAR ++A717..A71A ; valid # 5.0 MODIFIER LETTER DOT VERTICAL BAR..MODIFIER LETTER LOWER RIGHT CORNER ANGLE ++A71B..A71F ; valid # 5.1 MODIFIER LETTER RAISED UP ARROW..MODIFIER LETTER LOW INVERTED EXCLAMATION MARK ++A720..A721 ; valid ; ; NV8 # 5.0 MODIFIER LETTER STRESS AND HIGH TONE..MODIFIER LETTER STRESS AND LOW TONE ++A722 ; mapped ; A723 # 5.1 LATIN CAPITAL LETTER EGYPTOLOGICAL ALEF ++A723 ; valid # 5.1 LATIN SMALL LETTER EGYPTOLOGICAL ALEF ++A724 ; mapped ; A725 # 5.1 LATIN CAPITAL LETTER EGYPTOLOGICAL AIN ++A725 ; valid # 5.1 LATIN SMALL LETTER EGYPTOLOGICAL AIN ++A726 ; mapped ; A727 # 5.1 LATIN CAPITAL LETTER HENG ++A727 ; valid # 5.1 LATIN SMALL LETTER HENG ++A728 ; mapped ; A729 # 5.1 LATIN CAPITAL LETTER TZ ++A729 ; valid # 5.1 LATIN SMALL LETTER TZ ++A72A ; mapped ; A72B # 5.1 LATIN CAPITAL LETTER TRESILLO ++A72B ; valid # 5.1 LATIN SMALL LETTER TRESILLO ++A72C ; mapped ; A72D # 5.1 LATIN CAPITAL LETTER CUATRILLO ++A72D ; valid # 5.1 LATIN SMALL LETTER CUATRILLO ++A72E ; mapped ; A72F # 5.1 LATIN CAPITAL LETTER CUATRILLO WITH COMMA ++A72F..A731 ; valid # 5.1 LATIN SMALL LETTER CUATRILLO WITH COMMA..LATIN LETTER SMALL CAPITAL S ++A732 ; mapped ; A733 # 5.1 LATIN CAPITAL LETTER AA ++A733 ; valid # 5.1 LATIN SMALL LETTER AA ++A734 ; mapped ; A735 # 5.1 LATIN CAPITAL LETTER AO ++A735 ; valid # 5.1 LATIN SMALL LETTER AO ++A736 ; mapped ; A737 # 5.1 LATIN CAPITAL LETTER AU ++A737 ; valid # 5.1 LATIN SMALL LETTER AU ++A738 ; mapped ; A739 # 5.1 LATIN CAPITAL LETTER AV ++A739 ; valid # 5.1 LATIN SMALL LETTER AV ++A73A ; mapped ; A73B # 5.1 LATIN CAPITAL LETTER AV WITH HORIZONTAL BAR ++A73B ; valid # 5.1 LATIN SMALL LETTER AV WITH HORIZONTAL BAR ++A73C ; mapped ; A73D # 5.1 LATIN CAPITAL LETTER AY ++A73D ; valid # 5.1 LATIN SMALL LETTER AY ++A73E ; mapped ; A73F # 5.1 LATIN CAPITAL LETTER REVERSED C WITH DOT ++A73F ; valid # 5.1 LATIN SMALL LETTER 
REVERSED C WITH DOT ++A740 ; mapped ; A741 # 5.1 LATIN CAPITAL LETTER K WITH STROKE ++A741 ; valid # 5.1 LATIN SMALL LETTER K WITH STROKE ++A742 ; mapped ; A743 # 5.1 LATIN CAPITAL LETTER K WITH DIAGONAL STROKE ++A743 ; valid # 5.1 LATIN SMALL LETTER K WITH DIAGONAL STROKE ++A744 ; mapped ; A745 # 5.1 LATIN CAPITAL LETTER K WITH STROKE AND DIAGONAL STROKE ++A745 ; valid # 5.1 LATIN SMALL LETTER K WITH STROKE AND DIAGONAL STROKE ++A746 ; mapped ; A747 # 5.1 LATIN CAPITAL LETTER BROKEN L ++A747 ; valid # 5.1 LATIN SMALL LETTER BROKEN L ++A748 ; mapped ; A749 # 5.1 LATIN CAPITAL LETTER L WITH HIGH STROKE ++A749 ; valid # 5.1 LATIN SMALL LETTER L WITH HIGH STROKE ++A74A ; mapped ; A74B # 5.1 LATIN CAPITAL LETTER O WITH LONG STROKE OVERLAY ++A74B ; valid # 5.1 LATIN SMALL LETTER O WITH LONG STROKE OVERLAY ++A74C ; mapped ; A74D # 5.1 LATIN CAPITAL LETTER O WITH LOOP ++A74D ; valid # 5.1 LATIN SMALL LETTER O WITH LOOP ++A74E ; mapped ; A74F # 5.1 LATIN CAPITAL LETTER OO ++A74F ; valid # 5.1 LATIN SMALL LETTER OO ++A750 ; mapped ; A751 # 5.1 LATIN CAPITAL LETTER P WITH STROKE THROUGH DESCENDER ++A751 ; valid # 5.1 LATIN SMALL LETTER P WITH STROKE THROUGH DESCENDER ++A752 ; mapped ; A753 # 5.1 LATIN CAPITAL LETTER P WITH FLOURISH ++A753 ; valid # 5.1 LATIN SMALL LETTER P WITH FLOURISH ++A754 ; mapped ; A755 # 5.1 LATIN CAPITAL LETTER P WITH SQUIRREL TAIL ++A755 ; valid # 5.1 LATIN SMALL LETTER P WITH SQUIRREL TAIL ++A756 ; mapped ; A757 # 5.1 LATIN CAPITAL LETTER Q WITH STROKE THROUGH DESCENDER ++A757 ; valid # 5.1 LATIN SMALL LETTER Q WITH STROKE THROUGH DESCENDER ++A758 ; mapped ; A759 # 5.1 LATIN CAPITAL LETTER Q WITH DIAGONAL STROKE ++A759 ; valid # 5.1 LATIN SMALL LETTER Q WITH DIAGONAL STROKE ++A75A ; mapped ; A75B # 5.1 LATIN CAPITAL LETTER R ROTUNDA ++A75B ; valid # 5.1 LATIN SMALL LETTER R ROTUNDA ++A75C ; mapped ; A75D # 5.1 LATIN CAPITAL LETTER RUM ROTUNDA ++A75D ; valid # 5.1 LATIN SMALL LETTER RUM ROTUNDA ++A75E ; mapped ; A75F # 5.1 LATIN CAPITAL LETTER V 
WITH DIAGONAL STROKE ++A75F ; valid # 5.1 LATIN SMALL LETTER V WITH DIAGONAL STROKE ++A760 ; mapped ; A761 # 5.1 LATIN CAPITAL LETTER VY ++A761 ; valid # 5.1 LATIN SMALL LETTER VY ++A762 ; mapped ; A763 # 5.1 LATIN CAPITAL LETTER VISIGOTHIC Z ++A763 ; valid # 5.1 LATIN SMALL LETTER VISIGOTHIC Z ++A764 ; mapped ; A765 # 5.1 LATIN CAPITAL LETTER THORN WITH STROKE ++A765 ; valid # 5.1 LATIN SMALL LETTER THORN WITH STROKE ++A766 ; mapped ; A767 # 5.1 LATIN CAPITAL LETTER THORN WITH STROKE THROUGH DESCENDER ++A767 ; valid # 5.1 LATIN SMALL LETTER THORN WITH STROKE THROUGH DESCENDER ++A768 ; mapped ; A769 # 5.1 LATIN CAPITAL LETTER VEND ++A769 ; valid # 5.1 LATIN SMALL LETTER VEND ++A76A ; mapped ; A76B # 5.1 LATIN CAPITAL LETTER ET ++A76B ; valid # 5.1 LATIN SMALL LETTER ET ++A76C ; mapped ; A76D # 5.1 LATIN CAPITAL LETTER IS ++A76D ; valid # 5.1 LATIN SMALL LETTER IS ++A76E ; mapped ; A76F # 5.1 LATIN CAPITAL LETTER CON ++A76F ; valid # 5.1 LATIN SMALL LETTER CON ++A770 ; mapped ; A76F # 5.1 MODIFIER LETTER US ++A771..A778 ; valid # 5.1 LATIN SMALL LETTER DUM..LATIN SMALL LETTER UM ++A779 ; mapped ; A77A # 5.1 LATIN CAPITAL LETTER INSULAR D ++A77A ; valid # 5.1 LATIN SMALL LETTER INSULAR D ++A77B ; mapped ; A77C # 5.1 LATIN CAPITAL LETTER INSULAR F ++A77C ; valid # 5.1 LATIN SMALL LETTER INSULAR F ++A77D ; mapped ; 1D79 # 5.1 LATIN CAPITAL LETTER INSULAR G ++A77E ; mapped ; A77F # 5.1 LATIN CAPITAL LETTER TURNED INSULAR G ++A77F ; valid # 5.1 LATIN SMALL LETTER TURNED INSULAR G ++A780 ; mapped ; A781 # 5.1 LATIN CAPITAL LETTER TURNED L ++A781 ; valid # 5.1 LATIN SMALL LETTER TURNED L ++A782 ; mapped ; A783 # 5.1 LATIN CAPITAL LETTER INSULAR R ++A783 ; valid # 5.1 LATIN SMALL LETTER INSULAR R ++A784 ; mapped ; A785 # 5.1 LATIN CAPITAL LETTER INSULAR S ++A785 ; valid # 5.1 LATIN SMALL LETTER INSULAR S ++A786 ; mapped ; A787 # 5.1 LATIN CAPITAL LETTER INSULAR T ++A787..A788 ; valid # 5.1 LATIN SMALL LETTER INSULAR T..MODIFIER LETTER LOW CIRCUMFLEX ACCENT ++A789..A78A ; 
valid ; ; NV8 # 5.1 MODIFIER LETTER COLON..MODIFIER LETTER SHORT EQUALS SIGN ++A78B ; mapped ; A78C # 5.1 LATIN CAPITAL LETTER SALTILLO ++A78C ; valid # 5.1 LATIN SMALL LETTER SALTILLO ++A78D ; mapped ; 0265 # 6.0 LATIN CAPITAL LETTER TURNED H ++A78E ; valid # 6.0 LATIN SMALL LETTER L WITH RETROFLEX HOOK AND BELT ++A78F ; valid # 8.0 LATIN LETTER SINOLOGICAL DOT ++A790 ; mapped ; A791 # 6.0 LATIN CAPITAL LETTER N WITH DESCENDER ++A791 ; valid # 6.0 LATIN SMALL LETTER N WITH DESCENDER ++A792 ; mapped ; A793 # 6.1 LATIN CAPITAL LETTER C WITH BAR ++A793 ; valid # 6.1 LATIN SMALL LETTER C WITH BAR ++A794..A795 ; valid # 7.0 LATIN SMALL LETTER C WITH PALATAL HOOK..LATIN SMALL LETTER H WITH PALATAL HOOK ++A796 ; mapped ; A797 # 7.0 LATIN CAPITAL LETTER B WITH FLOURISH ++A797 ; valid # 7.0 LATIN SMALL LETTER B WITH FLOURISH ++A798 ; mapped ; A799 # 7.0 LATIN CAPITAL LETTER F WITH STROKE ++A799 ; valid # 7.0 LATIN SMALL LETTER F WITH STROKE ++A79A ; mapped ; A79B # 7.0 LATIN CAPITAL LETTER VOLAPUK AE ++A79B ; valid # 7.0 LATIN SMALL LETTER VOLAPUK AE ++A79C ; mapped ; A79D # 7.0 LATIN CAPITAL LETTER VOLAPUK OE ++A79D ; valid # 7.0 LATIN SMALL LETTER VOLAPUK OE ++A79E ; mapped ; A79F # 7.0 LATIN CAPITAL LETTER VOLAPUK UE ++A79F ; valid # 7.0 LATIN SMALL LETTER VOLAPUK UE ++A7A0 ; mapped ; A7A1 # 6.0 LATIN CAPITAL LETTER G WITH OBLIQUE STROKE ++A7A1 ; valid # 6.0 LATIN SMALL LETTER G WITH OBLIQUE STROKE ++A7A2 ; mapped ; A7A3 # 6.0 LATIN CAPITAL LETTER K WITH OBLIQUE STROKE ++A7A3 ; valid # 6.0 LATIN SMALL LETTER K WITH OBLIQUE STROKE ++A7A4 ; mapped ; A7A5 # 6.0 LATIN CAPITAL LETTER N WITH OBLIQUE STROKE ++A7A5 ; valid # 6.0 LATIN SMALL LETTER N WITH OBLIQUE STROKE ++A7A6 ; mapped ; A7A7 # 6.0 LATIN CAPITAL LETTER R WITH OBLIQUE STROKE ++A7A7 ; valid # 6.0 LATIN SMALL LETTER R WITH OBLIQUE STROKE ++A7A8 ; mapped ; A7A9 # 6.0 LATIN CAPITAL LETTER S WITH OBLIQUE STROKE ++A7A9 ; valid # 6.0 LATIN SMALL LETTER S WITH OBLIQUE STROKE ++A7AA ; mapped ; 0266 # 6.1 LATIN CAPITAL 
LETTER H WITH HOOK ++A7AB ; mapped ; 025C # 7.0 LATIN CAPITAL LETTER REVERSED OPEN E ++A7AC ; mapped ; 0261 # 7.0 LATIN CAPITAL LETTER SCRIPT G ++A7AD ; mapped ; 026C # 7.0 LATIN CAPITAL LETTER L WITH BELT ++A7AE ; mapped ; 026A # 9.0 LATIN CAPITAL LETTER SMALL CAPITAL I ++A7AF ; disallowed # NA ++A7B0 ; mapped ; 029E # 7.0 LATIN CAPITAL LETTER TURNED K ++A7B1 ; mapped ; 0287 # 7.0 LATIN CAPITAL LETTER TURNED T ++A7B2 ; mapped ; 029D # 8.0 LATIN CAPITAL LETTER J WITH CROSSED-TAIL ++A7B3 ; mapped ; AB53 # 8.0 LATIN CAPITAL LETTER CHI ++A7B4 ; mapped ; A7B5 # 8.0 LATIN CAPITAL LETTER BETA ++A7B5 ; valid # 8.0 LATIN SMALL LETTER BETA ++A7B6 ; mapped ; A7B7 # 8.0 LATIN CAPITAL LETTER OMEGA ++A7B7 ; valid # 8.0 LATIN SMALL LETTER OMEGA ++A7B8..A7F6 ; disallowed # NA .. ++A7F7 ; valid # 7.0 LATIN EPIGRAPHIC LETTER SIDEWAYS I ++A7F8 ; mapped ; 0127 # 6.1 MODIFIER LETTER CAPITAL H WITH STROKE ++A7F9 ; mapped ; 0153 # 6.1 MODIFIER LETTER SMALL LIGATURE OE ++A7FA ; valid # 6.0 LATIN LETTER SMALL CAPITAL TURNED M ++A7FB..A7FF ; valid # 5.1 LATIN EPIGRAPHIC LETTER REVERSED F..LATIN EPIGRAPHIC LETTER ARCHAIC M ++A800..A827 ; valid # 4.1 SYLOTI NAGRI LETTER A..SYLOTI NAGRI VOWEL SIGN OO ++A828..A82B ; valid ; ; NV8 # 4.1 SYLOTI NAGRI POETRY MARK-1..SYLOTI NAGRI POETRY MARK-4 ++A82C..A82F ; disallowed # NA .. ++A830..A839 ; valid ; ; NV8 # 5.2 NORTH INDIC FRACTION ONE QUARTER..NORTH INDIC QUANTITY MARK ++A83A..A83F ; disallowed # NA .. ++A840..A873 ; valid # 5.0 PHAGS-PA LETTER KA..PHAGS-PA LETTER CANDRABINDU ++A874..A877 ; valid ; ; NV8 # 5.0 PHAGS-PA SINGLE HEAD MARK..PHAGS-PA MARK DOUBLE SHAD ++A878..A87F ; disallowed # NA .. ++A880..A8C4 ; valid # 5.1 SAURASHTRA SIGN ANUSVARA..SAURASHTRA SIGN VIRAMA ++A8C5 ; valid # 9.0 SAURASHTRA SIGN CANDRABINDU ++A8C6..A8CD ; disallowed # NA .. ++A8CE..A8CF ; valid ; ; NV8 # 5.1 SAURASHTRA DANDA..SAURASHTRA DOUBLE DANDA ++A8D0..A8D9 ; valid # 5.1 SAURASHTRA DIGIT ZERO..SAURASHTRA DIGIT NINE ++A8DA..A8DF ; disallowed # NA .. 
++A8E0..A8F7 ; valid # 5.2 COMBINING DEVANAGARI DIGIT ZERO..DEVANAGARI SIGN CANDRABINDU AVAGRAHA ++A8F8..A8FA ; valid ; ; NV8 # 5.2 DEVANAGARI SIGN PUSHPIKA..DEVANAGARI CARET ++A8FB ; valid # 5.2 DEVANAGARI HEADSTROKE ++A8FC ; valid ; ; NV8 # 8.0 DEVANAGARI SIGN SIDDHAM ++A8FD ; valid # 8.0 DEVANAGARI JAIN OM ++A8FE..A8FF ; disallowed # NA .. ++A900..A92D ; valid # 5.1 KAYAH LI DIGIT ZERO..KAYAH LI TONE CALYA PLOPHU ++A92E..A92F ; valid ; ; NV8 # 5.1 KAYAH LI SIGN CWI..KAYAH LI SIGN SHYA ++A930..A953 ; valid # 5.1 REJANG LETTER KA..REJANG VIRAMA ++A954..A95E ; disallowed # NA .. ++A95F ; valid ; ; NV8 # 5.1 REJANG SECTION MARK ++A960..A97C ; valid ; ; NV8 # 5.2 HANGUL CHOSEONG TIKEUT-MIEUM..HANGUL CHOSEONG SSANGYEORINHIEUH ++A97D..A97F ; disallowed # NA .. ++A980..A9C0 ; valid # 5.2 JAVANESE SIGN PANYANGGA..JAVANESE PANGKON ++A9C1..A9CD ; valid ; ; NV8 # 5.2 JAVANESE LEFT RERENGGAN..JAVANESE TURNED PADA PISELEH ++A9CE ; disallowed # NA ++A9CF..A9D9 ; valid # 5.2 JAVANESE PANGRANGKEP..JAVANESE DIGIT NINE ++A9DA..A9DD ; disallowed # NA .. ++A9DE..A9DF ; valid ; ; NV8 # 5.2 JAVANESE PADA TIRTA TUMETES..JAVANESE PADA ISEN-ISEN ++A9E0..A9FE ; valid # 7.0 MYANMAR LETTER SHAN GHA..MYANMAR LETTER TAI LAING BHA ++A9FF ; disallowed # NA ++AA00..AA36 ; valid # 5.1 CHAM LETTER A..CHAM CONSONANT SIGN WA ++AA37..AA3F ; disallowed # NA .. ++AA40..AA4D ; valid # 5.1 CHAM LETTER FINAL K..CHAM CONSONANT SIGN FINAL H ++AA4E..AA4F ; disallowed # NA .. ++AA50..AA59 ; valid # 5.1 CHAM DIGIT ZERO..CHAM DIGIT NINE ++AA5A..AA5B ; disallowed # NA .. 
++AA5C..AA5F ; valid ; ; NV8 # 5.1 CHAM PUNCTUATION SPIRAL..CHAM PUNCTUATION TRIPLE DANDA ++AA60..AA76 ; valid # 5.2 MYANMAR LETTER KHAMTI GA..MYANMAR LOGOGRAM KHAMTI HM ++AA77..AA79 ; valid ; ; NV8 # 5.2 MYANMAR SYMBOL AITON EXCLAMATION..MYANMAR SYMBOL AITON TWO ++AA7A..AA7B ; valid # 5.2 MYANMAR LETTER AITON RA..MYANMAR SIGN PAO KAREN TONE ++AA7C..AA7F ; valid # 7.0 MYANMAR SIGN TAI LAING TONE-2..MYANMAR LETTER SHWE PALAUNG SHA ++AA80..AAC2 ; valid # 5.2 TAI VIET LETTER LOW KO..TAI VIET TONE MAI SONG ++AAC3..AADA ; disallowed # NA .. ++AADB..AADD ; valid # 5.2 TAI VIET SYMBOL KON..TAI VIET SYMBOL SAM ++AADE..AADF ; valid ; ; NV8 # 5.2 TAI VIET SYMBOL HO HOI..TAI VIET SYMBOL KOI KOI ++AAE0..AAEF ; valid # 6.1 MEETEI MAYEK LETTER E..MEETEI MAYEK VOWEL SIGN AAU ++AAF0..AAF1 ; valid ; ; NV8 # 6.1 MEETEI MAYEK CHEIKHAN..MEETEI MAYEK AHANG KHUDAM ++AAF2..AAF6 ; valid # 6.1 MEETEI MAYEK ANJI..MEETEI MAYEK VIRAMA ++AAF7..AB00 ; disallowed # NA .. ++AB01..AB06 ; valid # 6.0 ETHIOPIC SYLLABLE TTHU..ETHIOPIC SYLLABLE TTHO ++AB07..AB08 ; disallowed # NA .. ++AB09..AB0E ; valid # 6.0 ETHIOPIC SYLLABLE DDHU..ETHIOPIC SYLLABLE DDHO ++AB0F..AB10 ; disallowed # NA .. ++AB11..AB16 ; valid # 6.0 ETHIOPIC SYLLABLE DZU..ETHIOPIC SYLLABLE DZO ++AB17..AB1F ; disallowed # NA .. 
++AB20..AB26 ; valid # 6.0 ETHIOPIC SYLLABLE CCHHA..ETHIOPIC SYLLABLE CCHHO ++AB27 ; disallowed # NA ++AB28..AB2E ; valid # 6.0 ETHIOPIC SYLLABLE BBA..ETHIOPIC SYLLABLE BBO ++AB2F ; disallowed # NA ++AB30..AB5A ; valid # 7.0 LATIN SMALL LETTER BARRED ALPHA..LATIN SMALL LETTER Y WITH SHORT RIGHT LEG ++AB5B ; valid ; ; NV8 # 7.0 MODIFIER BREVE WITH INVERTED BREVE ++AB5C ; mapped ; A727 # 7.0 MODIFIER LETTER SMALL HENG ++AB5D ; mapped ; AB37 # 7.0 MODIFIER LETTER SMALL L WITH INVERTED LAZY S ++AB5E ; mapped ; 026B # 7.0 MODIFIER LETTER SMALL L WITH MIDDLE TILDE ++AB5F ; mapped ; AB52 # 7.0 MODIFIER LETTER SMALL U WITH LEFT HOOK ++AB60..AB63 ; valid # 8.0 LATIN SMALL LETTER SAKHA YAT..LATIN SMALL LETTER UO ++AB64..AB65 ; valid # 7.0 LATIN SMALL LETTER INVERTED ALPHA..GREEK LETTER SMALL CAPITAL OMEGA ++AB66..AB6F ; disallowed # NA .. ++AB70 ; mapped ; 13A0 # 8.0 CHEROKEE SMALL LETTER A ++AB71 ; mapped ; 13A1 # 8.0 CHEROKEE SMALL LETTER E ++AB72 ; mapped ; 13A2 # 8.0 CHEROKEE SMALL LETTER I ++AB73 ; mapped ; 13A3 # 8.0 CHEROKEE SMALL LETTER O ++AB74 ; mapped ; 13A4 # 8.0 CHEROKEE SMALL LETTER U ++AB75 ; mapped ; 13A5 # 8.0 CHEROKEE SMALL LETTER V ++AB76 ; mapped ; 13A6 # 8.0 CHEROKEE SMALL LETTER GA ++AB77 ; mapped ; 13A7 # 8.0 CHEROKEE SMALL LETTER KA ++AB78 ; mapped ; 13A8 # 8.0 CHEROKEE SMALL LETTER GE ++AB79 ; mapped ; 13A9 # 8.0 CHEROKEE SMALL LETTER GI ++AB7A ; mapped ; 13AA # 8.0 CHEROKEE SMALL LETTER GO ++AB7B ; mapped ; 13AB # 8.0 CHEROKEE SMALL LETTER GU ++AB7C ; mapped ; 13AC # 8.0 CHEROKEE SMALL LETTER GV ++AB7D ; mapped ; 13AD # 8.0 CHEROKEE SMALL LETTER HA ++AB7E ; mapped ; 13AE # 8.0 CHEROKEE SMALL LETTER HE ++AB7F ; mapped ; 13AF # 8.0 CHEROKEE SMALL LETTER HI ++AB80 ; mapped ; 13B0 # 8.0 CHEROKEE SMALL LETTER HO ++AB81 ; mapped ; 13B1 # 8.0 CHEROKEE SMALL LETTER HU ++AB82 ; mapped ; 13B2 # 8.0 CHEROKEE SMALL LETTER HV ++AB83 ; mapped ; 13B3 # 8.0 CHEROKEE SMALL LETTER LA ++AB84 ; mapped ; 13B4 # 8.0 CHEROKEE SMALL LETTER LE ++AB85 ; mapped ; 13B5 # 8.0 
CHEROKEE SMALL LETTER LI ++AB86 ; mapped ; 13B6 # 8.0 CHEROKEE SMALL LETTER LO ++AB87 ; mapped ; 13B7 # 8.0 CHEROKEE SMALL LETTER LU ++AB88 ; mapped ; 13B8 # 8.0 CHEROKEE SMALL LETTER LV ++AB89 ; mapped ; 13B9 # 8.0 CHEROKEE SMALL LETTER MA ++AB8A ; mapped ; 13BA # 8.0 CHEROKEE SMALL LETTER ME ++AB8B ; mapped ; 13BB # 8.0 CHEROKEE SMALL LETTER MI ++AB8C ; mapped ; 13BC # 8.0 CHEROKEE SMALL LETTER MO ++AB8D ; mapped ; 13BD # 8.0 CHEROKEE SMALL LETTER MU ++AB8E ; mapped ; 13BE # 8.0 CHEROKEE SMALL LETTER NA ++AB8F ; mapped ; 13BF # 8.0 CHEROKEE SMALL LETTER HNA ++AB90 ; mapped ; 13C0 # 8.0 CHEROKEE SMALL LETTER NAH ++AB91 ; mapped ; 13C1 # 8.0 CHEROKEE SMALL LETTER NE ++AB92 ; mapped ; 13C2 # 8.0 CHEROKEE SMALL LETTER NI ++AB93 ; mapped ; 13C3 # 8.0 CHEROKEE SMALL LETTER NO ++AB94 ; mapped ; 13C4 # 8.0 CHEROKEE SMALL LETTER NU ++AB95 ; mapped ; 13C5 # 8.0 CHEROKEE SMALL LETTER NV ++AB96 ; mapped ; 13C6 # 8.0 CHEROKEE SMALL LETTER QUA ++AB97 ; mapped ; 13C7 # 8.0 CHEROKEE SMALL LETTER QUE ++AB98 ; mapped ; 13C8 # 8.0 CHEROKEE SMALL LETTER QUI ++AB99 ; mapped ; 13C9 # 8.0 CHEROKEE SMALL LETTER QUO ++AB9A ; mapped ; 13CA # 8.0 CHEROKEE SMALL LETTER QUU ++AB9B ; mapped ; 13CB # 8.0 CHEROKEE SMALL LETTER QUV ++AB9C ; mapped ; 13CC # 8.0 CHEROKEE SMALL LETTER SA ++AB9D ; mapped ; 13CD # 8.0 CHEROKEE SMALL LETTER S ++AB9E ; mapped ; 13CE # 8.0 CHEROKEE SMALL LETTER SE ++AB9F ; mapped ; 13CF # 8.0 CHEROKEE SMALL LETTER SI ++ABA0 ; mapped ; 13D0 # 8.0 CHEROKEE SMALL LETTER SO ++ABA1 ; mapped ; 13D1 # 8.0 CHEROKEE SMALL LETTER SU ++ABA2 ; mapped ; 13D2 # 8.0 CHEROKEE SMALL LETTER SV ++ABA3 ; mapped ; 13D3 # 8.0 CHEROKEE SMALL LETTER DA ++ABA4 ; mapped ; 13D4 # 8.0 CHEROKEE SMALL LETTER TA ++ABA5 ; mapped ; 13D5 # 8.0 CHEROKEE SMALL LETTER DE ++ABA6 ; mapped ; 13D6 # 8.0 CHEROKEE SMALL LETTER TE ++ABA7 ; mapped ; 13D7 # 8.0 CHEROKEE SMALL LETTER DI ++ABA8 ; mapped ; 13D8 # 8.0 CHEROKEE SMALL LETTER TI ++ABA9 ; mapped ; 13D9 # 8.0 CHEROKEE SMALL LETTER DO ++ABAA ; mapped ; 13DA 
# 8.0 CHEROKEE SMALL LETTER DU ++ABAB ; mapped ; 13DB # 8.0 CHEROKEE SMALL LETTER DV ++ABAC ; mapped ; 13DC # 8.0 CHEROKEE SMALL LETTER DLA ++ABAD ; mapped ; 13DD # 8.0 CHEROKEE SMALL LETTER TLA ++ABAE ; mapped ; 13DE # 8.0 CHEROKEE SMALL LETTER TLE ++ABAF ; mapped ; 13DF # 8.0 CHEROKEE SMALL LETTER TLI ++ABB0 ; mapped ; 13E0 # 8.0 CHEROKEE SMALL LETTER TLO ++ABB1 ; mapped ; 13E1 # 8.0 CHEROKEE SMALL LETTER TLU ++ABB2 ; mapped ; 13E2 # 8.0 CHEROKEE SMALL LETTER TLV ++ABB3 ; mapped ; 13E3 # 8.0 CHEROKEE SMALL LETTER TSA ++ABB4 ; mapped ; 13E4 # 8.0 CHEROKEE SMALL LETTER TSE ++ABB5 ; mapped ; 13E5 # 8.0 CHEROKEE SMALL LETTER TSI ++ABB6 ; mapped ; 13E6 # 8.0 CHEROKEE SMALL LETTER TSO ++ABB7 ; mapped ; 13E7 # 8.0 CHEROKEE SMALL LETTER TSU ++ABB8 ; mapped ; 13E8 # 8.0 CHEROKEE SMALL LETTER TSV ++ABB9 ; mapped ; 13E9 # 8.0 CHEROKEE SMALL LETTER WA ++ABBA ; mapped ; 13EA # 8.0 CHEROKEE SMALL LETTER WE ++ABBB ; mapped ; 13EB # 8.0 CHEROKEE SMALL LETTER WI ++ABBC ; mapped ; 13EC # 8.0 CHEROKEE SMALL LETTER WO ++ABBD ; mapped ; 13ED # 8.0 CHEROKEE SMALL LETTER WU ++ABBE ; mapped ; 13EE # 8.0 CHEROKEE SMALL LETTER WV ++ABBF ; mapped ; 13EF # 8.0 CHEROKEE SMALL LETTER YA ++ABC0..ABEA ; valid # 5.2 MEETEI MAYEK LETTER KOK..MEETEI MAYEK VOWEL SIGN NUNG ++ABEB ; valid ; ; NV8 # 5.2 MEETEI MAYEK CHEIKHEI ++ABEC..ABED ; valid # 5.2 MEETEI MAYEK LUM IYEK..MEETEI MAYEK APUN IYEK ++ABEE..ABEF ; disallowed # NA .. ++ABF0..ABF9 ; valid # 5.2 MEETEI MAYEK DIGIT ZERO..MEETEI MAYEK DIGIT NINE ++ABFA..ABFF ; disallowed # NA .. ++AC00..D7A3 ; valid # 2.0 HANGUL SYLLABLE GA..HANGUL SYLLABLE HIH ++D7A4..D7AF ; disallowed # NA .. ++D7B0..D7C6 ; valid ; ; NV8 # 5.2 HANGUL JUNGSEONG O-YEO..HANGUL JUNGSEONG ARAEA-E ++D7C7..D7CA ; disallowed # NA .. ++D7CB..D7FB ; valid ; ; NV8 # 5.2 HANGUL JONGSEONG NIEUN-RIEUL..HANGUL JONGSEONG PHIEUPH-THIEUTH ++D7FC..D7FF ; disallowed # NA .. ++D800..DFFF ; disallowed # 2.0 .. ++E000..F8FF ; disallowed # 1.1 .. 
++F900 ; mapped ; 8C48 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F900 ++F901 ; mapped ; 66F4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F901 ++F902 ; mapped ; 8ECA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F902 ++F903 ; mapped ; 8CC8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F903 ++F904 ; mapped ; 6ED1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F904 ++F905 ; mapped ; 4E32 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F905 ++F906 ; mapped ; 53E5 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F906 ++F907..F908 ; mapped ; 9F9C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F907..CJK COMPATIBILITY IDEOGRAPH-F908 ++F909 ; mapped ; 5951 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F909 ++F90A ; mapped ; 91D1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90A ++F90B ; mapped ; 5587 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90B ++F90C ; mapped ; 5948 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90C ++F90D ; mapped ; 61F6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90D ++F90E ; mapped ; 7669 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90E ++F90F ; mapped ; 7F85 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F90F ++F910 ; mapped ; 863F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F910 ++F911 ; mapped ; 87BA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F911 ++F912 ; mapped ; 88F8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F912 ++F913 ; mapped ; 908F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F913 ++F914 ; mapped ; 6A02 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F914 ++F915 ; mapped ; 6D1B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F915 ++F916 ; mapped ; 70D9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F916 ++F917 ; mapped ; 73DE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F917 ++F918 ; mapped ; 843D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F918 ++F919 ; mapped ; 916A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F919 ++F91A ; mapped ; 99F1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91A ++F91B ; mapped ; 4E82 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91B ++F91C ; mapped ; 5375 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91C ++F91D ; mapped ; 6B04 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91D ++F91E ; mapped ; 721B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91E ++F91F ; mapped ; 862D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F91F ++F920 ; mapped ; 9E1E # 1.1 CJK 
COMPATIBILITY IDEOGRAPH-F920 ++F921 ; mapped ; 5D50 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F921 ++F922 ; mapped ; 6FEB # 1.1 CJK COMPATIBILITY IDEOGRAPH-F922 ++F923 ; mapped ; 85CD # 1.1 CJK COMPATIBILITY IDEOGRAPH-F923 ++F924 ; mapped ; 8964 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F924 ++F925 ; mapped ; 62C9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F925 ++F926 ; mapped ; 81D8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F926 ++F927 ; mapped ; 881F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F927 ++F928 ; mapped ; 5ECA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F928 ++F929 ; mapped ; 6717 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F929 ++F92A ; mapped ; 6D6A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92A ++F92B ; mapped ; 72FC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92B ++F92C ; mapped ; 90CE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92C ++F92D ; mapped ; 4F86 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92D ++F92E ; mapped ; 51B7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92E ++F92F ; mapped ; 52DE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F92F ++F930 ; mapped ; 64C4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F930 ++F931 ; mapped ; 6AD3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F931 ++F932 ; mapped ; 7210 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F932 ++F933 ; mapped ; 76E7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F933 ++F934 ; mapped ; 8001 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F934 ++F935 ; mapped ; 8606 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F935 ++F936 ; mapped ; 865C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F936 ++F937 ; mapped ; 8DEF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F937 ++F938 ; mapped ; 9732 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F938 ++F939 ; mapped ; 9B6F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F939 ++F93A ; mapped ; 9DFA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F93A ++F93B ; mapped ; 788C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F93B ++F93C ; mapped ; 797F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F93C ++F93D ; mapped ; 7DA0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F93D ++F93E ; mapped ; 83C9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F93E ++F93F ; mapped ; 9304 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F93F ++F940 ; mapped ; 9E7F # 1.1 CJK COMPATIBILITY 
IDEOGRAPH-F940 ++F941 ; mapped ; 8AD6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F941 ++F942 ; mapped ; 58DF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F942 ++F943 ; mapped ; 5F04 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F943 ++F944 ; mapped ; 7C60 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F944 ++F945 ; mapped ; 807E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F945 ++F946 ; mapped ; 7262 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F946 ++F947 ; mapped ; 78CA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F947 ++F948 ; mapped ; 8CC2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F948 ++F949 ; mapped ; 96F7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F949 ++F94A ; mapped ; 58D8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94A ++F94B ; mapped ; 5C62 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94B ++F94C ; mapped ; 6A13 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94C ++F94D ; mapped ; 6DDA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94D ++F94E ; mapped ; 6F0F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94E ++F94F ; mapped ; 7D2F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F94F ++F950 ; mapped ; 7E37 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F950 ++F951 ; mapped ; 964B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F951 ++F952 ; mapped ; 52D2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F952 ++F953 ; mapped ; 808B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F953 ++F954 ; mapped ; 51DC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F954 ++F955 ; mapped ; 51CC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F955 ++F956 ; mapped ; 7A1C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F956 ++F957 ; mapped ; 7DBE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F957 ++F958 ; mapped ; 83F1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F958 ++F959 ; mapped ; 9675 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F959 ++F95A ; mapped ; 8B80 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95A ++F95B ; mapped ; 62CF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95B ++F95C ; mapped ; 6A02 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95C ++F95D ; mapped ; 8AFE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95D ++F95E ; mapped ; 4E39 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95E ++F95F ; mapped ; 5BE7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F95F ++F960 ; mapped ; 6012 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F960 
++F961 ; mapped ; 7387 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F961 ++F962 ; mapped ; 7570 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F962 ++F963 ; mapped ; 5317 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F963 ++F964 ; mapped ; 78FB # 1.1 CJK COMPATIBILITY IDEOGRAPH-F964 ++F965 ; mapped ; 4FBF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F965 ++F966 ; mapped ; 5FA9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F966 ++F967 ; mapped ; 4E0D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F967 ++F968 ; mapped ; 6CCC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F968 ++F969 ; mapped ; 6578 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F969 ++F96A ; mapped ; 7D22 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96A ++F96B ; mapped ; 53C3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96B ++F96C ; mapped ; 585E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96C ++F96D ; mapped ; 7701 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96D ++F96E ; mapped ; 8449 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96E ++F96F ; mapped ; 8AAA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F96F ++F970 ; mapped ; 6BBA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F970 ++F971 ; mapped ; 8FB0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F971 ++F972 ; mapped ; 6C88 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F972 ++F973 ; mapped ; 62FE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F973 ++F974 ; mapped ; 82E5 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F974 ++F975 ; mapped ; 63A0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F975 ++F976 ; mapped ; 7565 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F976 ++F977 ; mapped ; 4EAE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F977 ++F978 ; mapped ; 5169 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F978 ++F979 ; mapped ; 51C9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F979 ++F97A ; mapped ; 6881 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97A ++F97B ; mapped ; 7CE7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97B ++F97C ; mapped ; 826F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97C ++F97D ; mapped ; 8AD2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97D ++F97E ; mapped ; 91CF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97E ++F97F ; mapped ; 52F5 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F97F ++F980 ; mapped ; 5442 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F980 ++F981 ; mapped 
; 5973 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F981 ++F982 ; mapped ; 5EEC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F982 ++F983 ; mapped ; 65C5 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F983 ++F984 ; mapped ; 6FFE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F984 ++F985 ; mapped ; 792A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F985 ++F986 ; mapped ; 95AD # 1.1 CJK COMPATIBILITY IDEOGRAPH-F986 ++F987 ; mapped ; 9A6A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F987 ++F988 ; mapped ; 9E97 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F988 ++F989 ; mapped ; 9ECE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F989 ++F98A ; mapped ; 529B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98A ++F98B ; mapped ; 66C6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98B ++F98C ; mapped ; 6B77 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98C ++F98D ; mapped ; 8F62 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98D ++F98E ; mapped ; 5E74 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98E ++F98F ; mapped ; 6190 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F98F ++F990 ; mapped ; 6200 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F990 ++F991 ; mapped ; 649A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F991 ++F992 ; mapped ; 6F23 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F992 ++F993 ; mapped ; 7149 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F993 ++F994 ; mapped ; 7489 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F994 ++F995 ; mapped ; 79CA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F995 ++F996 ; mapped ; 7DF4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F996 ++F997 ; mapped ; 806F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F997 ++F998 ; mapped ; 8F26 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F998 ++F999 ; mapped ; 84EE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F999 ++F99A ; mapped ; 9023 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99A ++F99B ; mapped ; 934A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99B ++F99C ; mapped ; 5217 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99C ++F99D ; mapped ; 52A3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99D ++F99E ; mapped ; 54BD # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99E ++F99F ; mapped ; 70C8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F99F ++F9A0 ; mapped ; 88C2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A0 ++F9A1 ; mapped ; 8AAA # 1.1 
CJK COMPATIBILITY IDEOGRAPH-F9A1 ++F9A2 ; mapped ; 5EC9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A2 ++F9A3 ; mapped ; 5FF5 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A3 ++F9A4 ; mapped ; 637B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A4 ++F9A5 ; mapped ; 6BAE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A5 ++F9A6 ; mapped ; 7C3E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A6 ++F9A7 ; mapped ; 7375 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A7 ++F9A8 ; mapped ; 4EE4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A8 ++F9A9 ; mapped ; 56F9 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9A9 ++F9AA ; mapped ; 5BE7 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AA ++F9AB ; mapped ; 5DBA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AB ++F9AC ; mapped ; 601C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AC ++F9AD ; mapped ; 73B2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AD ++F9AE ; mapped ; 7469 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AE ++F9AF ; mapped ; 7F9A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9AF ++F9B0 ; mapped ; 8046 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B0 ++F9B1 ; mapped ; 9234 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B1 ++F9B2 ; mapped ; 96F6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B2 ++F9B3 ; mapped ; 9748 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B3 ++F9B4 ; mapped ; 9818 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B4 ++F9B5 ; mapped ; 4F8B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B5 ++F9B6 ; mapped ; 79AE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B6 ++F9B7 ; mapped ; 91B4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B7 ++F9B8 ; mapped ; 96B8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B8 ++F9B9 ; mapped ; 60E1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9B9 ++F9BA ; mapped ; 4E86 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BA ++F9BB ; mapped ; 50DA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BB ++F9BC ; mapped ; 5BEE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BC ++F9BD ; mapped ; 5C3F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BD ++F9BE ; mapped ; 6599 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BE ++F9BF ; mapped ; 6A02 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9BF ++F9C0 ; mapped ; 71CE # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C0 ++F9C1 ; mapped ; 7642 # 1.1 CJK 
COMPATIBILITY IDEOGRAPH-F9C1 ++F9C2 ; mapped ; 84FC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C2 ++F9C3 ; mapped ; 907C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C3 ++F9C4 ; mapped ; 9F8D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C4 ++F9C5 ; mapped ; 6688 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C5 ++F9C6 ; mapped ; 962E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C6 ++F9C7 ; mapped ; 5289 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C7 ++F9C8 ; mapped ; 677B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C8 ++F9C9 ; mapped ; 67F3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9C9 ++F9CA ; mapped ; 6D41 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CA ++F9CB ; mapped ; 6E9C # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CB ++F9CC ; mapped ; 7409 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CC ++F9CD ; mapped ; 7559 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CD ++F9CE ; mapped ; 786B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CE ++F9CF ; mapped ; 7D10 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9CF ++F9D0 ; mapped ; 985E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D0 ++F9D1 ; mapped ; 516D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D1 ++F9D2 ; mapped ; 622E # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D2 ++F9D3 ; mapped ; 9678 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D3 ++F9D4 ; mapped ; 502B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D4 ++F9D5 ; mapped ; 5D19 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D5 ++F9D6 ; mapped ; 6DEA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D6 ++F9D7 ; mapped ; 8F2A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D7 ++F9D8 ; mapped ; 5F8B # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D8 ++F9D9 ; mapped ; 6144 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9D9 ++F9DA ; mapped ; 6817 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DA ++F9DB ; mapped ; 7387 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DB ++F9DC ; mapped ; 9686 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DC ++F9DD ; mapped ; 5229 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DD ++F9DE ; mapped ; 540F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DE ++F9DF ; mapped ; 5C65 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9DF ++F9E0 ; mapped ; 6613 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E0 ++F9E1 ; mapped ; 674E # 1.1 CJK COMPATIBILITY 
IDEOGRAPH-F9E1 ++F9E2 ; mapped ; 68A8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E2 ++F9E3 ; mapped ; 6CE5 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E3 ++F9E4 ; mapped ; 7406 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E4 ++F9E5 ; mapped ; 75E2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E5 ++F9E6 ; mapped ; 7F79 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E6 ++F9E7 ; mapped ; 88CF # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E7 ++F9E8 ; mapped ; 88E1 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E8 ++F9E9 ; mapped ; 91CC # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9E9 ++F9EA ; mapped ; 96E2 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9EA ++F9EB ; mapped ; 533F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9EB ++F9EC ; mapped ; 6EBA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9EC ++F9ED ; mapped ; 541D # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9ED ++F9EE ; mapped ; 71D0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9EE ++F9EF ; mapped ; 7498 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9EF ++F9F0 ; mapped ; 85FA # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F0 ++F9F1 ; mapped ; 96A3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F1 ++F9F2 ; mapped ; 9C57 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F2 ++F9F3 ; mapped ; 9E9F # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F3 ++F9F4 ; mapped ; 6797 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F4 ++F9F5 ; mapped ; 6DCB # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F5 ++F9F6 ; mapped ; 81E8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F6 ++F9F7 ; mapped ; 7ACB # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F7 ++F9F8 ; mapped ; 7B20 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F8 ++F9F9 ; mapped ; 7C92 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9F9 ++F9FA ; mapped ; 72C0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FA ++F9FB ; mapped ; 7099 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FB ++F9FC ; mapped ; 8B58 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FC ++F9FD ; mapped ; 4EC0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FD ++F9FE ; mapped ; 8336 # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FE ++F9FF ; mapped ; 523A # 1.1 CJK COMPATIBILITY IDEOGRAPH-F9FF ++FA00 ; mapped ; 5207 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA00 ++FA01 ; mapped ; 5EA6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA01 
++FA02 ; mapped ; 62D3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA02 ++FA03 ; mapped ; 7CD6 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA03 ++FA04 ; mapped ; 5B85 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA04 ++FA05 ; mapped ; 6D1E # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA05 ++FA06 ; mapped ; 66B4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA06 ++FA07 ; mapped ; 8F3B # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA07 ++FA08 ; mapped ; 884C # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA08 ++FA09 ; mapped ; 964D # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA09 ++FA0A ; mapped ; 898B # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA0A ++FA0B ; mapped ; 5ED3 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA0B ++FA0C ; mapped ; 5140 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA0C ++FA0D ; mapped ; 55C0 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA0D ++FA0E..FA0F ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA0E..CJK COMPATIBILITY IDEOGRAPH-FA0F ++FA10 ; mapped ; 585A # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA10 ++FA11 ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA11 ++FA12 ; mapped ; 6674 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA12 ++FA13..FA14 ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA13..CJK COMPATIBILITY IDEOGRAPH-FA14 ++FA15 ; mapped ; 51DE # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA15 ++FA16 ; mapped ; 732A # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA16 ++FA17 ; mapped ; 76CA # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA17 ++FA18 ; mapped ; 793C # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA18 ++FA19 ; mapped ; 795E # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA19 ++FA1A ; mapped ; 7965 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1A ++FA1B ; mapped ; 798F # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1B ++FA1C ; mapped ; 9756 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1C ++FA1D ; mapped ; 7CBE # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1D ++FA1E ; mapped ; 7FBD # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1E ++FA1F ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA1F ++FA20 ; mapped ; 8612 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA20 ++FA21 ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA21 ++FA22 ; mapped ; 8AF8 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA22 ++FA23..FA24 ; valid # 1.1 CJK 
COMPATIBILITY IDEOGRAPH-FA23..CJK COMPATIBILITY IDEOGRAPH-FA24 ++FA25 ; mapped ; 9038 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA25 ++FA26 ; mapped ; 90FD # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA26 ++FA27..FA29 ; valid # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA27..CJK COMPATIBILITY IDEOGRAPH-FA29 ++FA2A ; mapped ; 98EF # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA2A ++FA2B ; mapped ; 98FC # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA2B ++FA2C ; mapped ; 9928 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA2C ++FA2D ; mapped ; 9DB4 # 1.1 CJK COMPATIBILITY IDEOGRAPH-FA2D ++FA2E ; mapped ; 90DE # 6.1 CJK COMPATIBILITY IDEOGRAPH-FA2E ++FA2F ; mapped ; 96B7 # 6.1 CJK COMPATIBILITY IDEOGRAPH-FA2F ++FA30 ; mapped ; 4FAE # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA30 ++FA31 ; mapped ; 50E7 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA31 ++FA32 ; mapped ; 514D # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA32 ++FA33 ; mapped ; 52C9 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA33 ++FA34 ; mapped ; 52E4 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA34 ++FA35 ; mapped ; 5351 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA35 ++FA36 ; mapped ; 559D # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA36 ++FA37 ; mapped ; 5606 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA37 ++FA38 ; mapped ; 5668 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA38 ++FA39 ; mapped ; 5840 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA39 ++FA3A ; mapped ; 58A8 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3A ++FA3B ; mapped ; 5C64 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3B ++FA3C ; mapped ; 5C6E # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3C ++FA3D ; mapped ; 6094 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3D ++FA3E ; mapped ; 6168 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3E ++FA3F ; mapped ; 618E # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA3F ++FA40 ; mapped ; 61F2 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA40 ++FA41 ; mapped ; 654F # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA41 ++FA42 ; mapped ; 65E2 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA42 ++FA43 ; mapped ; 6691 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA43 ++FA44 ; mapped ; 6885 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA44 ++FA45 ; mapped ; 6D77 # 3.2 CJK 
COMPATIBILITY IDEOGRAPH-FA45 ++FA46 ; mapped ; 6E1A # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA46 ++FA47 ; mapped ; 6F22 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA47 ++FA48 ; mapped ; 716E # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA48 ++FA49 ; mapped ; 722B # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA49 ++FA4A ; mapped ; 7422 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4A ++FA4B ; mapped ; 7891 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4B ++FA4C ; mapped ; 793E # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4C ++FA4D ; mapped ; 7949 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4D ++FA4E ; mapped ; 7948 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4E ++FA4F ; mapped ; 7950 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA4F ++FA50 ; mapped ; 7956 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA50 ++FA51 ; mapped ; 795D # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA51 ++FA52 ; mapped ; 798D # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA52 ++FA53 ; mapped ; 798E # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA53 ++FA54 ; mapped ; 7A40 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA54 ++FA55 ; mapped ; 7A81 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA55 ++FA56 ; mapped ; 7BC0 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA56 ++FA57 ; mapped ; 7DF4 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA57 ++FA58 ; mapped ; 7E09 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA58 ++FA59 ; mapped ; 7E41 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA59 ++FA5A ; mapped ; 7F72 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA5A ++FA5B ; mapped ; 8005 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA5B ++FA5C ; mapped ; 81ED # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA5C ++FA5D..FA5E ; mapped ; 8279 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA5D..CJK COMPATIBILITY IDEOGRAPH-FA5E ++FA5F ; mapped ; 8457 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA5F ++FA60 ; mapped ; 8910 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA60 ++FA61 ; mapped ; 8996 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA61 ++FA62 ; mapped ; 8B01 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA62 ++FA63 ; mapped ; 8B39 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA63 ++FA64 ; mapped ; 8CD3 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA64 ++FA65 ; mapped ; 8D08 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA65 ++FA66 ; 
mapped ; 8FB6 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA66 ++FA67 ; mapped ; 9038 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA67 ++FA68 ; mapped ; 96E3 # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA68 ++FA69 ; mapped ; 97FF # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA69 ++FA6A ; mapped ; 983B # 3.2 CJK COMPATIBILITY IDEOGRAPH-FA6A ++FA6B ; mapped ; 6075 # 5.2 CJK COMPATIBILITY IDEOGRAPH-FA6B ++FA6C ; mapped ; 242EE # 5.2 CJK COMPATIBILITY IDEOGRAPH-FA6C ++FA6D ; mapped ; 8218 # 5.2 CJK COMPATIBILITY IDEOGRAPH-FA6D ++FA6E..FA6F ; disallowed # NA .. ++FA70 ; mapped ; 4E26 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA70 ++FA71 ; mapped ; 51B5 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA71 ++FA72 ; mapped ; 5168 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA72 ++FA73 ; mapped ; 4F80 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA73 ++FA74 ; mapped ; 5145 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA74 ++FA75 ; mapped ; 5180 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA75 ++FA76 ; mapped ; 52C7 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA76 ++FA77 ; mapped ; 52FA # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA77 ++FA78 ; mapped ; 559D # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA78 ++FA79 ; mapped ; 5555 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA79 ++FA7A ; mapped ; 5599 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7A ++FA7B ; mapped ; 55E2 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7B ++FA7C ; mapped ; 585A # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7C ++FA7D ; mapped ; 58B3 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7D ++FA7E ; mapped ; 5944 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7E ++FA7F ; mapped ; 5954 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA7F ++FA80 ; mapped ; 5A62 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA80 ++FA81 ; mapped ; 5B28 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA81 ++FA82 ; mapped ; 5ED2 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA82 ++FA83 ; mapped ; 5ED9 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA83 ++FA84 ; mapped ; 5F69 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA84 ++FA85 ; mapped ; 5FAD # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA85 ++FA86 ; mapped ; 60D8 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA86 ++FA87 ; mapped ; 614E # 4.1 CJK COMPATIBILITY 
IDEOGRAPH-FA87 ++FA88 ; mapped ; 6108 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA88 ++FA89 ; mapped ; 618E # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA89 ++FA8A ; mapped ; 6160 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8A ++FA8B ; mapped ; 61F2 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8B ++FA8C ; mapped ; 6234 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8C ++FA8D ; mapped ; 63C4 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8D ++FA8E ; mapped ; 641C # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8E ++FA8F ; mapped ; 6452 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA8F ++FA90 ; mapped ; 6556 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA90 ++FA91 ; mapped ; 6674 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA91 ++FA92 ; mapped ; 6717 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA92 ++FA93 ; mapped ; 671B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA93 ++FA94 ; mapped ; 6756 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA94 ++FA95 ; mapped ; 6B79 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA95 ++FA96 ; mapped ; 6BBA # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA96 ++FA97 ; mapped ; 6D41 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA97 ++FA98 ; mapped ; 6EDB # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA98 ++FA99 ; mapped ; 6ECB # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA99 ++FA9A ; mapped ; 6F22 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9A ++FA9B ; mapped ; 701E # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9B ++FA9C ; mapped ; 716E # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9C ++FA9D ; mapped ; 77A7 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9D ++FA9E ; mapped ; 7235 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9E ++FA9F ; mapped ; 72AF # 4.1 CJK COMPATIBILITY IDEOGRAPH-FA9F ++FAA0 ; mapped ; 732A # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA0 ++FAA1 ; mapped ; 7471 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA1 ++FAA2 ; mapped ; 7506 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA2 ++FAA3 ; mapped ; 753B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA3 ++FAA4 ; mapped ; 761D # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA4 ++FAA5 ; mapped ; 761F # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA5 ++FAA6 ; mapped ; 76CA # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA6 ++FAA7 ; mapped ; 76DB # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA7 
++FAA8 ; mapped ; 76F4 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA8 ++FAA9 ; mapped ; 774A # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAA9 ++FAAA ; mapped ; 7740 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAA ++FAAB ; mapped ; 78CC # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAB ++FAAC ; mapped ; 7AB1 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAC ++FAAD ; mapped ; 7BC0 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAD ++FAAE ; mapped ; 7C7B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAE ++FAAF ; mapped ; 7D5B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAAF ++FAB0 ; mapped ; 7DF4 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB0 ++FAB1 ; mapped ; 7F3E # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB1 ++FAB2 ; mapped ; 8005 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB2 ++FAB3 ; mapped ; 8352 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB3 ++FAB4 ; mapped ; 83EF # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB4 ++FAB5 ; mapped ; 8779 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB5 ++FAB6 ; mapped ; 8941 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB6 ++FAB7 ; mapped ; 8986 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB7 ++FAB8 ; mapped ; 8996 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB8 ++FAB9 ; mapped ; 8ABF # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAB9 ++FABA ; mapped ; 8AF8 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABA ++FABB ; mapped ; 8ACB # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABB ++FABC ; mapped ; 8B01 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABC ++FABD ; mapped ; 8AFE # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABD ++FABE ; mapped ; 8AED # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABE ++FABF ; mapped ; 8B39 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FABF ++FAC0 ; mapped ; 8B8A # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC0 ++FAC1 ; mapped ; 8D08 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC1 ++FAC2 ; mapped ; 8F38 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC2 ++FAC3 ; mapped ; 9072 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC3 ++FAC4 ; mapped ; 9199 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC4 ++FAC5 ; mapped ; 9276 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC5 ++FAC6 ; mapped ; 967C # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC6 ++FAC7 ; mapped ; 96E3 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC7 ++FAC8 ; mapped 
; 9756 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC8 ++FAC9 ; mapped ; 97DB # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAC9 ++FACA ; mapped ; 97FF # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACA ++FACB ; mapped ; 980B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACB ++FACC ; mapped ; 983B # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACC ++FACD ; mapped ; 9B12 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACD ++FACE ; mapped ; 9F9C # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACE ++FACF ; mapped ; 2284A # 4.1 CJK COMPATIBILITY IDEOGRAPH-FACF ++FAD0 ; mapped ; 22844 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD0 ++FAD1 ; mapped ; 233D5 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD1 ++FAD2 ; mapped ; 3B9D # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD2 ++FAD3 ; mapped ; 4018 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD3 ++FAD4 ; mapped ; 4039 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD4 ++FAD5 ; mapped ; 25249 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD5 ++FAD6 ; mapped ; 25CD0 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD6 ++FAD7 ; mapped ; 27ED3 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD7 ++FAD8 ; mapped ; 9F43 # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD8 ++FAD9 ; mapped ; 9F8E # 4.1 CJK COMPATIBILITY IDEOGRAPH-FAD9 ++FADA..FAFF ; disallowed # NA .. ++FB00 ; mapped ; 0066 0066 # 1.1 LATIN SMALL LIGATURE FF ++FB01 ; mapped ; 0066 0069 # 1.1 LATIN SMALL LIGATURE FI ++FB02 ; mapped ; 0066 006C # 1.1 LATIN SMALL LIGATURE FL ++FB03 ; mapped ; 0066 0066 0069 #1.1 LATIN SMALL LIGATURE FFI ++FB04 ; mapped ; 0066 0066 006C #1.1 LATIN SMALL LIGATURE FFL ++FB05..FB06 ; mapped ; 0073 0074 # 1.1 LATIN SMALL LIGATURE LONG S T..LATIN SMALL LIGATURE ST ++FB07..FB12 ; disallowed # NA .. ++FB13 ; mapped ; 0574 0576 # 1.1 ARMENIAN SMALL LIGATURE MEN NOW ++FB14 ; mapped ; 0574 0565 # 1.1 ARMENIAN SMALL LIGATURE MEN ECH ++FB15 ; mapped ; 0574 056B # 1.1 ARMENIAN SMALL LIGATURE MEN INI ++FB16 ; mapped ; 057E 0576 # 1.1 ARMENIAN SMALL LIGATURE VEW NOW ++FB17 ; mapped ; 0574 056D # 1.1 ARMENIAN SMALL LIGATURE MEN XEH ++FB18..FB1C ; disallowed # NA .. 
++FB1D ; mapped ; 05D9 05B4 # 3.0 HEBREW LETTER YOD WITH HIRIQ ++FB1E ; valid # 1.1 HEBREW POINT JUDEO-SPANISH VARIKA ++FB1F ; mapped ; 05F2 05B7 # 1.1 HEBREW LIGATURE YIDDISH YOD YOD PATAH ++FB20 ; mapped ; 05E2 # 1.1 HEBREW LETTER ALTERNATIVE AYIN ++FB21 ; mapped ; 05D0 # 1.1 HEBREW LETTER WIDE ALEF ++FB22 ; mapped ; 05D3 # 1.1 HEBREW LETTER WIDE DALET ++FB23 ; mapped ; 05D4 # 1.1 HEBREW LETTER WIDE HE ++FB24 ; mapped ; 05DB # 1.1 HEBREW LETTER WIDE KAF ++FB25 ; mapped ; 05DC # 1.1 HEBREW LETTER WIDE LAMED ++FB26 ; mapped ; 05DD # 1.1 HEBREW LETTER WIDE FINAL MEM ++FB27 ; mapped ; 05E8 # 1.1 HEBREW LETTER WIDE RESH ++FB28 ; mapped ; 05EA # 1.1 HEBREW LETTER WIDE TAV ++FB29 ; disallowed_STD3_mapped ; 002B # 1.1 HEBREW LETTER ALTERNATIVE PLUS SIGN ++FB2A ; mapped ; 05E9 05C1 # 1.1 HEBREW LETTER SHIN WITH SHIN DOT ++FB2B ; mapped ; 05E9 05C2 # 1.1 HEBREW LETTER SHIN WITH SIN DOT ++FB2C ; mapped ; 05E9 05BC 05C1 #1.1 HEBREW LETTER SHIN WITH DAGESH AND SHIN DOT ++FB2D ; mapped ; 05E9 05BC 05C2 #1.1 HEBREW LETTER SHIN WITH DAGESH AND SIN DOT ++FB2E ; mapped ; 05D0 05B7 # 1.1 HEBREW LETTER ALEF WITH PATAH ++FB2F ; mapped ; 05D0 05B8 # 1.1 HEBREW LETTER ALEF WITH QAMATS ++FB30 ; mapped ; 05D0 05BC # 1.1 HEBREW LETTER ALEF WITH MAPIQ ++FB31 ; mapped ; 05D1 05BC # 1.1 HEBREW LETTER BET WITH DAGESH ++FB32 ; mapped ; 05D2 05BC # 1.1 HEBREW LETTER GIMEL WITH DAGESH ++FB33 ; mapped ; 05D3 05BC # 1.1 HEBREW LETTER DALET WITH DAGESH ++FB34 ; mapped ; 05D4 05BC # 1.1 HEBREW LETTER HE WITH MAPIQ ++FB35 ; mapped ; 05D5 05BC # 1.1 HEBREW LETTER VAV WITH DAGESH ++FB36 ; mapped ; 05D6 05BC # 1.1 HEBREW LETTER ZAYIN WITH DAGESH ++FB37 ; disallowed # NA ++FB38 ; mapped ; 05D8 05BC # 1.1 HEBREW LETTER TET WITH DAGESH ++FB39 ; mapped ; 05D9 05BC # 1.1 HEBREW LETTER YOD WITH DAGESH ++FB3A ; mapped ; 05DA 05BC # 1.1 HEBREW LETTER FINAL KAF WITH DAGESH ++FB3B ; mapped ; 05DB 05BC # 1.1 HEBREW LETTER KAF WITH DAGESH ++FB3C ; mapped ; 05DC 05BC # 1.1 HEBREW LETTER LAMED WITH DAGESH ++FB3D ; 
disallowed # NA ++FB3E ; mapped ; 05DE 05BC # 1.1 HEBREW LETTER MEM WITH DAGESH ++FB3F ; disallowed # NA ++FB40 ; mapped ; 05E0 05BC # 1.1 HEBREW LETTER NUN WITH DAGESH ++FB41 ; mapped ; 05E1 05BC # 1.1 HEBREW LETTER SAMEKH WITH DAGESH ++FB42 ; disallowed # NA ++FB43 ; mapped ; 05E3 05BC # 1.1 HEBREW LETTER FINAL PE WITH DAGESH ++FB44 ; mapped ; 05E4 05BC # 1.1 HEBREW LETTER PE WITH DAGESH ++FB45 ; disallowed # NA ++FB46 ; mapped ; 05E6 05BC # 1.1 HEBREW LETTER TSADI WITH DAGESH ++FB47 ; mapped ; 05E7 05BC # 1.1 HEBREW LETTER QOF WITH DAGESH ++FB48 ; mapped ; 05E8 05BC # 1.1 HEBREW LETTER RESH WITH DAGESH ++FB49 ; mapped ; 05E9 05BC # 1.1 HEBREW LETTER SHIN WITH DAGESH ++FB4A ; mapped ; 05EA 05BC # 1.1 HEBREW LETTER TAV WITH DAGESH ++FB4B ; mapped ; 05D5 05B9 # 1.1 HEBREW LETTER VAV WITH HOLAM ++FB4C ; mapped ; 05D1 05BF # 1.1 HEBREW LETTER BET WITH RAFE ++FB4D ; mapped ; 05DB 05BF # 1.1 HEBREW LETTER KAF WITH RAFE ++FB4E ; mapped ; 05E4 05BF # 1.1 HEBREW LETTER PE WITH RAFE ++FB4F ; mapped ; 05D0 05DC # 1.1 HEBREW LIGATURE ALEF LAMED ++FB50..FB51 ; mapped ; 0671 # 1.1 ARABIC LETTER ALEF WASLA ISOLATED FORM..ARABIC LETTER ALEF WASLA FINAL FORM ++FB52..FB55 ; mapped ; 067B # 1.1 ARABIC LETTER BEEH ISOLATED FORM..ARABIC LETTER BEEH MEDIAL FORM ++FB56..FB59 ; mapped ; 067E # 1.1 ARABIC LETTER PEH ISOLATED FORM..ARABIC LETTER PEH MEDIAL FORM ++FB5A..FB5D ; mapped ; 0680 # 1.1 ARABIC LETTER BEHEH ISOLATED FORM..ARABIC LETTER BEHEH MEDIAL FORM ++FB5E..FB61 ; mapped ; 067A # 1.1 ARABIC LETTER TTEHEH ISOLATED FORM..ARABIC LETTER TTEHEH MEDIAL FORM ++FB62..FB65 ; mapped ; 067F # 1.1 ARABIC LETTER TEHEH ISOLATED FORM..ARABIC LETTER TEHEH MEDIAL FORM ++FB66..FB69 ; mapped ; 0679 # 1.1 ARABIC LETTER TTEH ISOLATED FORM..ARABIC LETTER TTEH MEDIAL FORM ++FB6A..FB6D ; mapped ; 06A4 # 1.1 ARABIC LETTER VEH ISOLATED FORM..ARABIC LETTER VEH MEDIAL FORM ++FB6E..FB71 ; mapped ; 06A6 # 1.1 ARABIC LETTER PEHEH ISOLATED FORM..ARABIC LETTER PEHEH MEDIAL FORM ++FB72..FB75 ; mapped ; 0684 # 
1.1 ARABIC LETTER DYEH ISOLATED FORM..ARABIC LETTER DYEH MEDIAL FORM ++FB76..FB79 ; mapped ; 0683 # 1.1 ARABIC LETTER NYEH ISOLATED FORM..ARABIC LETTER NYEH MEDIAL FORM ++FB7A..FB7D ; mapped ; 0686 # 1.1 ARABIC LETTER TCHEH ISOLATED FORM..ARABIC LETTER TCHEH MEDIAL FORM ++FB7E..FB81 ; mapped ; 0687 # 1.1 ARABIC LETTER TCHEHEH ISOLATED FORM..ARABIC LETTER TCHEHEH MEDIAL FORM ++FB82..FB83 ; mapped ; 068D # 1.1 ARABIC LETTER DDAHAL ISOLATED FORM..ARABIC LETTER DDAHAL FINAL FORM ++FB84..FB85 ; mapped ; 068C # 1.1 ARABIC LETTER DAHAL ISOLATED FORM..ARABIC LETTER DAHAL FINAL FORM ++FB86..FB87 ; mapped ; 068E # 1.1 ARABIC LETTER DUL ISOLATED FORM..ARABIC LETTER DUL FINAL FORM ++FB88..FB89 ; mapped ; 0688 # 1.1 ARABIC LETTER DDAL ISOLATED FORM..ARABIC LETTER DDAL FINAL FORM ++FB8A..FB8B ; mapped ; 0698 # 1.1 ARABIC LETTER JEH ISOLATED FORM..ARABIC LETTER JEH FINAL FORM ++FB8C..FB8D ; mapped ; 0691 # 1.1 ARABIC LETTER RREH ISOLATED FORM..ARABIC LETTER RREH FINAL FORM ++FB8E..FB91 ; mapped ; 06A9 # 1.1 ARABIC LETTER KEHEH ISOLATED FORM..ARABIC LETTER KEHEH MEDIAL FORM ++FB92..FB95 ; mapped ; 06AF # 1.1 ARABIC LETTER GAF ISOLATED FORM..ARABIC LETTER GAF MEDIAL FORM ++FB96..FB99 ; mapped ; 06B3 # 1.1 ARABIC LETTER GUEH ISOLATED FORM..ARABIC LETTER GUEH MEDIAL FORM ++FB9A..FB9D ; mapped ; 06B1 # 1.1 ARABIC LETTER NGOEH ISOLATED FORM..ARABIC LETTER NGOEH MEDIAL FORM ++FB9E..FB9F ; mapped ; 06BA # 1.1 ARABIC LETTER NOON GHUNNA ISOLATED FORM..ARABIC LETTER NOON GHUNNA FINAL FORM ++FBA0..FBA3 ; mapped ; 06BB # 1.1 ARABIC LETTER RNOON ISOLATED FORM..ARABIC LETTER RNOON MEDIAL FORM ++FBA4..FBA5 ; mapped ; 06C0 # 1.1 ARABIC LETTER HEH WITH YEH ABOVE ISOLATED FORM..ARABIC LETTER HEH WITH YEH ABOVE FINAL FORM ++FBA6..FBA9 ; mapped ; 06C1 # 1.1 ARABIC LETTER HEH GOAL ISOLATED FORM..ARABIC LETTER HEH GOAL MEDIAL FORM ++FBAA..FBAD ; mapped ; 06BE # 1.1 ARABIC LETTER HEH DOACHASHMEE ISOLATED FORM..ARABIC LETTER HEH DOACHASHMEE MEDIAL FORM ++FBAE..FBAF ; mapped ; 06D2 # 1.1 ARABIC LETTER YEH 
BARREE ISOLATED FORM..ARABIC LETTER YEH BARREE FINAL FORM ++FBB0..FBB1 ; mapped ; 06D3 # 1.1 ARABIC LETTER YEH BARREE WITH HAMZA ABOVE ISOLATED FORM..ARABIC LETTER YEH BARREE WITH HAMZA ABOVE FINAL FORM ++FBB2..FBC1 ; valid ; ; NV8 # 6.0 ARABIC SYMBOL DOT ABOVE..ARABIC SYMBOL SMALL TAH BELOW ++FBC2..FBD2 ; disallowed # NA .. ++FBD3..FBD6 ; mapped ; 06AD # 1.1 ARABIC LETTER NG ISOLATED FORM..ARABIC LETTER NG MEDIAL FORM ++FBD7..FBD8 ; mapped ; 06C7 # 1.1 ARABIC LETTER U ISOLATED FORM..ARABIC LETTER U FINAL FORM ++FBD9..FBDA ; mapped ; 06C6 # 1.1 ARABIC LETTER OE ISOLATED FORM..ARABIC LETTER OE FINAL FORM ++FBDB..FBDC ; mapped ; 06C8 # 1.1 ARABIC LETTER YU ISOLATED FORM..ARABIC LETTER YU FINAL FORM ++FBDD ; mapped ; 06C7 0674 # 1.1 ARABIC LETTER U WITH HAMZA ABOVE ISOLATED FORM ++FBDE..FBDF ; mapped ; 06CB # 1.1 ARABIC LETTER VE ISOLATED FORM..ARABIC LETTER VE FINAL FORM ++FBE0..FBE1 ; mapped ; 06C5 # 1.1 ARABIC LETTER KIRGHIZ OE ISOLATED FORM..ARABIC LETTER KIRGHIZ OE FINAL FORM ++FBE2..FBE3 ; mapped ; 06C9 # 1.1 ARABIC LETTER KIRGHIZ YU ISOLATED FORM..ARABIC LETTER KIRGHIZ YU FINAL FORM ++FBE4..FBE7 ; mapped ; 06D0 # 1.1 ARABIC LETTER E ISOLATED FORM..ARABIC LETTER E MEDIAL FORM ++FBE8..FBE9 ; mapped ; 0649 # 1.1 ARABIC LETTER UIGHUR KAZAKH KIRGHIZ ALEF MAKSURA INITIAL FORM..ARABIC LETTER UIGHUR KAZAKH KIRGHIZ ALEF MAKSURA MEDIAL FORM ++FBEA..FBEB ; mapped ; 0626 0627 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF FINAL FORM ++FBEC..FBED ; mapped ; 0626 06D5 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH AE ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH AE FINAL FORM ++FBEE..FBEF ; mapped ; 0626 0648 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH WAW ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH WAW FINAL FORM ++FBF0..FBF1 ; mapped ; 0626 06C7 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH U ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH U FINAL FORM ++FBF2..FBF3 ; 
mapped ; 0626 06C6 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH OE ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH OE FINAL FORM ++FBF4..FBF5 ; mapped ; 0626 06C8 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YU ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YU FINAL FORM ++FBF6..FBF8 ; mapped ; 0626 06D0 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH E ISOLATED FORM..ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH E INITIAL FORM ++FBF9..FBFB ; mapped ; 0626 0649 # 1.1 ARABIC LIGATURE UIGHUR KIRGHIZ YEH WITH HAMZA ABOVE WITH ALEF MAKSURA ISOLATED FORM..ARABIC LIGATURE UIGHUR KIRGHIZ YEH WITH HAMZA ABOVE WITH ALEF MAKSURA INITIAL FORM ++FBFC..FBFF ; mapped ; 06CC # 1.1 ARABIC LETTER FARSI YEH ISOLATED FORM..ARABIC LETTER FARSI YEH MEDIAL FORM ++FC00 ; mapped ; 0626 062C # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH JEEM ISOLATED FORM ++FC01 ; mapped ; 0626 062D # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HAH ISOLATED FORM ++FC02 ; mapped ; 0626 0645 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM ISOLATED FORM ++FC03 ; mapped ; 0626 0649 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF MAKSURA ISOLATED FORM ++FC04 ; mapped ; 0626 064A # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YEH ISOLATED FORM ++FC05 ; mapped ; 0628 062C # 1.1 ARABIC LIGATURE BEH WITH JEEM ISOLATED FORM ++FC06 ; mapped ; 0628 062D # 1.1 ARABIC LIGATURE BEH WITH HAH ISOLATED FORM ++FC07 ; mapped ; 0628 062E # 1.1 ARABIC LIGATURE BEH WITH KHAH ISOLATED FORM ++FC08 ; mapped ; 0628 0645 # 1.1 ARABIC LIGATURE BEH WITH MEEM ISOLATED FORM ++FC09 ; mapped ; 0628 0649 # 1.1 ARABIC LIGATURE BEH WITH ALEF MAKSURA ISOLATED FORM ++FC0A ; mapped ; 0628 064A # 1.1 ARABIC LIGATURE BEH WITH YEH ISOLATED FORM ++FC0B ; mapped ; 062A 062C # 1.1 ARABIC LIGATURE TEH WITH JEEM ISOLATED FORM ++FC0C ; mapped ; 062A 062D # 1.1 ARABIC LIGATURE TEH WITH HAH ISOLATED FORM ++FC0D ; mapped ; 062A 062E # 1.1 ARABIC LIGATURE TEH WITH KHAH ISOLATED FORM ++FC0E ; mapped ; 062A 0645 # 1.1 ARABIC 
LIGATURE TEH WITH MEEM ISOLATED FORM ++FC0F ; mapped ; 062A 0649 # 1.1 ARABIC LIGATURE TEH WITH ALEF MAKSURA ISOLATED FORM ++FC10 ; mapped ; 062A 064A # 1.1 ARABIC LIGATURE TEH WITH YEH ISOLATED FORM ++FC11 ; mapped ; 062B 062C # 1.1 ARABIC LIGATURE THEH WITH JEEM ISOLATED FORM ++FC12 ; mapped ; 062B 0645 # 1.1 ARABIC LIGATURE THEH WITH MEEM ISOLATED FORM ++FC13 ; mapped ; 062B 0649 # 1.1 ARABIC LIGATURE THEH WITH ALEF MAKSURA ISOLATED FORM ++FC14 ; mapped ; 062B 064A # 1.1 ARABIC LIGATURE THEH WITH YEH ISOLATED FORM ++FC15 ; mapped ; 062C 062D # 1.1 ARABIC LIGATURE JEEM WITH HAH ISOLATED FORM ++FC16 ; mapped ; 062C 0645 # 1.1 ARABIC LIGATURE JEEM WITH MEEM ISOLATED FORM ++FC17 ; mapped ; 062D 062C # 1.1 ARABIC LIGATURE HAH WITH JEEM ISOLATED FORM ++FC18 ; mapped ; 062D 0645 # 1.1 ARABIC LIGATURE HAH WITH MEEM ISOLATED FORM ++FC19 ; mapped ; 062E 062C # 1.1 ARABIC LIGATURE KHAH WITH JEEM ISOLATED FORM ++FC1A ; mapped ; 062E 062D # 1.1 ARABIC LIGATURE KHAH WITH HAH ISOLATED FORM ++FC1B ; mapped ; 062E 0645 # 1.1 ARABIC LIGATURE KHAH WITH MEEM ISOLATED FORM ++FC1C ; mapped ; 0633 062C # 1.1 ARABIC LIGATURE SEEN WITH JEEM ISOLATED FORM ++FC1D ; mapped ; 0633 062D # 1.1 ARABIC LIGATURE SEEN WITH HAH ISOLATED FORM ++FC1E ; mapped ; 0633 062E # 1.1 ARABIC LIGATURE SEEN WITH KHAH ISOLATED FORM ++FC1F ; mapped ; 0633 0645 # 1.1 ARABIC LIGATURE SEEN WITH MEEM ISOLATED FORM ++FC20 ; mapped ; 0635 062D # 1.1 ARABIC LIGATURE SAD WITH HAH ISOLATED FORM ++FC21 ; mapped ; 0635 0645 # 1.1 ARABIC LIGATURE SAD WITH MEEM ISOLATED FORM ++FC22 ; mapped ; 0636 062C # 1.1 ARABIC LIGATURE DAD WITH JEEM ISOLATED FORM ++FC23 ; mapped ; 0636 062D # 1.1 ARABIC LIGATURE DAD WITH HAH ISOLATED FORM ++FC24 ; mapped ; 0636 062E # 1.1 ARABIC LIGATURE DAD WITH KHAH ISOLATED FORM ++FC25 ; mapped ; 0636 0645 # 1.1 ARABIC LIGATURE DAD WITH MEEM ISOLATED FORM ++FC26 ; mapped ; 0637 062D # 1.1 ARABIC LIGATURE TAH WITH HAH ISOLATED FORM ++FC27 ; mapped ; 0637 0645 # 1.1 ARABIC LIGATURE TAH WITH MEEM 
ISOLATED FORM ++FC28 ; mapped ; 0638 0645 # 1.1 ARABIC LIGATURE ZAH WITH MEEM ISOLATED FORM ++FC29 ; mapped ; 0639 062C # 1.1 ARABIC LIGATURE AIN WITH JEEM ISOLATED FORM ++FC2A ; mapped ; 0639 0645 # 1.1 ARABIC LIGATURE AIN WITH MEEM ISOLATED FORM ++FC2B ; mapped ; 063A 062C # 1.1 ARABIC LIGATURE GHAIN WITH JEEM ISOLATED FORM ++FC2C ; mapped ; 063A 0645 # 1.1 ARABIC LIGATURE GHAIN WITH MEEM ISOLATED FORM ++FC2D ; mapped ; 0641 062C # 1.1 ARABIC LIGATURE FEH WITH JEEM ISOLATED FORM ++FC2E ; mapped ; 0641 062D # 1.1 ARABIC LIGATURE FEH WITH HAH ISOLATED FORM ++FC2F ; mapped ; 0641 062E # 1.1 ARABIC LIGATURE FEH WITH KHAH ISOLATED FORM ++FC30 ; mapped ; 0641 0645 # 1.1 ARABIC LIGATURE FEH WITH MEEM ISOLATED FORM ++FC31 ; mapped ; 0641 0649 # 1.1 ARABIC LIGATURE FEH WITH ALEF MAKSURA ISOLATED FORM ++FC32 ; mapped ; 0641 064A # 1.1 ARABIC LIGATURE FEH WITH YEH ISOLATED FORM ++FC33 ; mapped ; 0642 062D # 1.1 ARABIC LIGATURE QAF WITH HAH ISOLATED FORM ++FC34 ; mapped ; 0642 0645 # 1.1 ARABIC LIGATURE QAF WITH MEEM ISOLATED FORM ++FC35 ; mapped ; 0642 0649 # 1.1 ARABIC LIGATURE QAF WITH ALEF MAKSURA ISOLATED FORM ++FC36 ; mapped ; 0642 064A # 1.1 ARABIC LIGATURE QAF WITH YEH ISOLATED FORM ++FC37 ; mapped ; 0643 0627 # 1.1 ARABIC LIGATURE KAF WITH ALEF ISOLATED FORM ++FC38 ; mapped ; 0643 062C # 1.1 ARABIC LIGATURE KAF WITH JEEM ISOLATED FORM ++FC39 ; mapped ; 0643 062D # 1.1 ARABIC LIGATURE KAF WITH HAH ISOLATED FORM ++FC3A ; mapped ; 0643 062E # 1.1 ARABIC LIGATURE KAF WITH KHAH ISOLATED FORM ++FC3B ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM ISOLATED FORM ++FC3C ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM ISOLATED FORM ++FC3D ; mapped ; 0643 0649 # 1.1 ARABIC LIGATURE KAF WITH ALEF MAKSURA ISOLATED FORM ++FC3E ; mapped ; 0643 064A # 1.1 ARABIC LIGATURE KAF WITH YEH ISOLATED FORM ++FC3F ; mapped ; 0644 062C # 1.1 ARABIC LIGATURE LAM WITH JEEM ISOLATED FORM ++FC40 ; mapped ; 0644 062D # 1.1 ARABIC LIGATURE LAM WITH HAH ISOLATED FORM ++FC41 ; mapped 
; 0644 062E # 1.1 ARABIC LIGATURE LAM WITH KHAH ISOLATED FORM ++FC42 ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH MEEM ISOLATED FORM ++FC43 ; mapped ; 0644 0649 # 1.1 ARABIC LIGATURE LAM WITH ALEF MAKSURA ISOLATED FORM ++FC44 ; mapped ; 0644 064A # 1.1 ARABIC LIGATURE LAM WITH YEH ISOLATED FORM ++FC45 ; mapped ; 0645 062C # 1.1 ARABIC LIGATURE MEEM WITH JEEM ISOLATED FORM ++FC46 ; mapped ; 0645 062D # 1.1 ARABIC LIGATURE MEEM WITH HAH ISOLATED FORM ++FC47 ; mapped ; 0645 062E # 1.1 ARABIC LIGATURE MEEM WITH KHAH ISOLATED FORM ++FC48 ; mapped ; 0645 0645 # 1.1 ARABIC LIGATURE MEEM WITH MEEM ISOLATED FORM ++FC49 ; mapped ; 0645 0649 # 1.1 ARABIC LIGATURE MEEM WITH ALEF MAKSURA ISOLATED FORM ++FC4A ; mapped ; 0645 064A # 1.1 ARABIC LIGATURE MEEM WITH YEH ISOLATED FORM ++FC4B ; mapped ; 0646 062C # 1.1 ARABIC LIGATURE NOON WITH JEEM ISOLATED FORM ++FC4C ; mapped ; 0646 062D # 1.1 ARABIC LIGATURE NOON WITH HAH ISOLATED FORM ++FC4D ; mapped ; 0646 062E # 1.1 ARABIC LIGATURE NOON WITH KHAH ISOLATED FORM ++FC4E ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM ISOLATED FORM ++FC4F ; mapped ; 0646 0649 # 1.1 ARABIC LIGATURE NOON WITH ALEF MAKSURA ISOLATED FORM ++FC50 ; mapped ; 0646 064A # 1.1 ARABIC LIGATURE NOON WITH YEH ISOLATED FORM ++FC51 ; mapped ; 0647 062C # 1.1 ARABIC LIGATURE HEH WITH JEEM ISOLATED FORM ++FC52 ; mapped ; 0647 0645 # 1.1 ARABIC LIGATURE HEH WITH MEEM ISOLATED FORM ++FC53 ; mapped ; 0647 0649 # 1.1 ARABIC LIGATURE HEH WITH ALEF MAKSURA ISOLATED FORM ++FC54 ; mapped ; 0647 064A # 1.1 ARABIC LIGATURE HEH WITH YEH ISOLATED FORM ++FC55 ; mapped ; 064A 062C # 1.1 ARABIC LIGATURE YEH WITH JEEM ISOLATED FORM ++FC56 ; mapped ; 064A 062D # 1.1 ARABIC LIGATURE YEH WITH HAH ISOLATED FORM ++FC57 ; mapped ; 064A 062E # 1.1 ARABIC LIGATURE YEH WITH KHAH ISOLATED FORM ++FC58 ; mapped ; 064A 0645 # 1.1 ARABIC LIGATURE YEH WITH MEEM ISOLATED FORM ++FC59 ; mapped ; 064A 0649 # 1.1 ARABIC LIGATURE YEH WITH ALEF MAKSURA ISOLATED FORM ++FC5A ; mapped ; 
064A 064A # 1.1 ARABIC LIGATURE YEH WITH YEH ISOLATED FORM ++FC5B ; mapped ; 0630 0670 # 1.1 ARABIC LIGATURE THAL WITH SUPERSCRIPT ALEF ISOLATED FORM ++FC5C ; mapped ; 0631 0670 # 1.1 ARABIC LIGATURE REH WITH SUPERSCRIPT ALEF ISOLATED FORM ++FC5D ; mapped ; 0649 0670 # 1.1 ARABIC LIGATURE ALEF MAKSURA WITH SUPERSCRIPT ALEF ISOLATED FORM ++FC5E ; disallowed_STD3_mapped ; 0020 064C 0651 #1.1 ARABIC LIGATURE SHADDA WITH DAMMATAN ISOLATED FORM ++FC5F ; disallowed_STD3_mapped ; 0020 064D 0651 #1.1 ARABIC LIGATURE SHADDA WITH KASRATAN ISOLATED FORM ++FC60 ; disallowed_STD3_mapped ; 0020 064E 0651 #1.1 ARABIC LIGATURE SHADDA WITH FATHA ISOLATED FORM ++FC61 ; disallowed_STD3_mapped ; 0020 064F 0651 #1.1 ARABIC LIGATURE SHADDA WITH DAMMA ISOLATED FORM ++FC62 ; disallowed_STD3_mapped ; 0020 0650 0651 #1.1 ARABIC LIGATURE SHADDA WITH KASRA ISOLATED FORM ++FC63 ; disallowed_STD3_mapped ; 0020 0651 0670 #1.1 ARABIC LIGATURE SHADDA WITH SUPERSCRIPT ALEF ISOLATED FORM ++FC64 ; mapped ; 0626 0631 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH REH FINAL FORM ++FC65 ; mapped ; 0626 0632 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ZAIN FINAL FORM ++FC66 ; mapped ; 0626 0645 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM FINAL FORM ++FC67 ; mapped ; 0626 0646 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH NOON FINAL FORM ++FC68 ; mapped ; 0626 0649 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH ALEF MAKSURA FINAL FORM ++FC69 ; mapped ; 0626 064A # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH YEH FINAL FORM ++FC6A ; mapped ; 0628 0631 # 1.1 ARABIC LIGATURE BEH WITH REH FINAL FORM ++FC6B ; mapped ; 0628 0632 # 1.1 ARABIC LIGATURE BEH WITH ZAIN FINAL FORM ++FC6C ; mapped ; 0628 0645 # 1.1 ARABIC LIGATURE BEH WITH MEEM FINAL FORM ++FC6D ; mapped ; 0628 0646 # 1.1 ARABIC LIGATURE BEH WITH NOON FINAL FORM ++FC6E ; mapped ; 0628 0649 # 1.1 ARABIC LIGATURE BEH WITH ALEF MAKSURA FINAL FORM ++FC6F ; mapped ; 0628 064A # 1.1 ARABIC LIGATURE BEH WITH YEH FINAL FORM ++FC70 ; mapped ; 
062A 0631 # 1.1 ARABIC LIGATURE TEH WITH REH FINAL FORM ++FC71 ; mapped ; 062A 0632 # 1.1 ARABIC LIGATURE TEH WITH ZAIN FINAL FORM ++FC72 ; mapped ; 062A 0645 # 1.1 ARABIC LIGATURE TEH WITH MEEM FINAL FORM ++FC73 ; mapped ; 062A 0646 # 1.1 ARABIC LIGATURE TEH WITH NOON FINAL FORM ++FC74 ; mapped ; 062A 0649 # 1.1 ARABIC LIGATURE TEH WITH ALEF MAKSURA FINAL FORM ++FC75 ; mapped ; 062A 064A # 1.1 ARABIC LIGATURE TEH WITH YEH FINAL FORM ++FC76 ; mapped ; 062B 0631 # 1.1 ARABIC LIGATURE THEH WITH REH FINAL FORM ++FC77 ; mapped ; 062B 0632 # 1.1 ARABIC LIGATURE THEH WITH ZAIN FINAL FORM ++FC78 ; mapped ; 062B 0645 # 1.1 ARABIC LIGATURE THEH WITH MEEM FINAL FORM ++FC79 ; mapped ; 062B 0646 # 1.1 ARABIC LIGATURE THEH WITH NOON FINAL FORM ++FC7A ; mapped ; 062B 0649 # 1.1 ARABIC LIGATURE THEH WITH ALEF MAKSURA FINAL FORM ++FC7B ; mapped ; 062B 064A # 1.1 ARABIC LIGATURE THEH WITH YEH FINAL FORM ++FC7C ; mapped ; 0641 0649 # 1.1 ARABIC LIGATURE FEH WITH ALEF MAKSURA FINAL FORM ++FC7D ; mapped ; 0641 064A # 1.1 ARABIC LIGATURE FEH WITH YEH FINAL FORM ++FC7E ; mapped ; 0642 0649 # 1.1 ARABIC LIGATURE QAF WITH ALEF MAKSURA FINAL FORM ++FC7F ; mapped ; 0642 064A # 1.1 ARABIC LIGATURE QAF WITH YEH FINAL FORM ++FC80 ; mapped ; 0643 0627 # 1.1 ARABIC LIGATURE KAF WITH ALEF FINAL FORM ++FC81 ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM FINAL FORM ++FC82 ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM FINAL FORM ++FC83 ; mapped ; 0643 0649 # 1.1 ARABIC LIGATURE KAF WITH ALEF MAKSURA FINAL FORM ++FC84 ; mapped ; 0643 064A # 1.1 ARABIC LIGATURE KAF WITH YEH FINAL FORM ++FC85 ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH MEEM FINAL FORM ++FC86 ; mapped ; 0644 0649 # 1.1 ARABIC LIGATURE LAM WITH ALEF MAKSURA FINAL FORM ++FC87 ; mapped ; 0644 064A # 1.1 ARABIC LIGATURE LAM WITH YEH FINAL FORM ++FC88 ; mapped ; 0645 0627 # 1.1 ARABIC LIGATURE MEEM WITH ALEF FINAL FORM ++FC89 ; mapped ; 0645 0645 # 1.1 ARABIC LIGATURE MEEM WITH MEEM FINAL FORM ++FC8A ; mapped ; 
0646 0631 # 1.1 ARABIC LIGATURE NOON WITH REH FINAL FORM ++FC8B ; mapped ; 0646 0632 # 1.1 ARABIC LIGATURE NOON WITH ZAIN FINAL FORM ++FC8C ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM FINAL FORM ++FC8D ; mapped ; 0646 0646 # 1.1 ARABIC LIGATURE NOON WITH NOON FINAL FORM ++FC8E ; mapped ; 0646 0649 # 1.1 ARABIC LIGATURE NOON WITH ALEF MAKSURA FINAL FORM ++FC8F ; mapped ; 0646 064A # 1.1 ARABIC LIGATURE NOON WITH YEH FINAL FORM ++FC90 ; mapped ; 0649 0670 # 1.1 ARABIC LIGATURE ALEF MAKSURA WITH SUPERSCRIPT ALEF FINAL FORM ++FC91 ; mapped ; 064A 0631 # 1.1 ARABIC LIGATURE YEH WITH REH FINAL FORM ++FC92 ; mapped ; 064A 0632 # 1.1 ARABIC LIGATURE YEH WITH ZAIN FINAL FORM ++FC93 ; mapped ; 064A 0645 # 1.1 ARABIC LIGATURE YEH WITH MEEM FINAL FORM ++FC94 ; mapped ; 064A 0646 # 1.1 ARABIC LIGATURE YEH WITH NOON FINAL FORM ++FC95 ; mapped ; 064A 0649 # 1.1 ARABIC LIGATURE YEH WITH ALEF MAKSURA FINAL FORM ++FC96 ; mapped ; 064A 064A # 1.1 ARABIC LIGATURE YEH WITH YEH FINAL FORM ++FC97 ; mapped ; 0626 062C # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH JEEM INITIAL FORM ++FC98 ; mapped ; 0626 062D # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HAH INITIAL FORM ++FC99 ; mapped ; 0626 062E # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH KHAH INITIAL FORM ++FC9A ; mapped ; 0626 0645 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM INITIAL FORM ++FC9B ; mapped ; 0626 0647 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HEH INITIAL FORM ++FC9C ; mapped ; 0628 062C # 1.1 ARABIC LIGATURE BEH WITH JEEM INITIAL FORM ++FC9D ; mapped ; 0628 062D # 1.1 ARABIC LIGATURE BEH WITH HAH INITIAL FORM ++FC9E ; mapped ; 0628 062E # 1.1 ARABIC LIGATURE BEH WITH KHAH INITIAL FORM ++FC9F ; mapped ; 0628 0645 # 1.1 ARABIC LIGATURE BEH WITH MEEM INITIAL FORM ++FCA0 ; mapped ; 0628 0647 # 1.1 ARABIC LIGATURE BEH WITH HEH INITIAL FORM ++FCA1 ; mapped ; 062A 062C # 1.1 ARABIC LIGATURE TEH WITH JEEM INITIAL FORM ++FCA2 ; mapped ; 062A 062D # 1.1 ARABIC LIGATURE TEH WITH HAH INITIAL FORM 
++FCA3 ; mapped ; 062A 062E # 1.1 ARABIC LIGATURE TEH WITH KHAH INITIAL FORM ++FCA4 ; mapped ; 062A 0645 # 1.1 ARABIC LIGATURE TEH WITH MEEM INITIAL FORM ++FCA5 ; mapped ; 062A 0647 # 1.1 ARABIC LIGATURE TEH WITH HEH INITIAL FORM ++FCA6 ; mapped ; 062B 0645 # 1.1 ARABIC LIGATURE THEH WITH MEEM INITIAL FORM ++FCA7 ; mapped ; 062C 062D # 1.1 ARABIC LIGATURE JEEM WITH HAH INITIAL FORM ++FCA8 ; mapped ; 062C 0645 # 1.1 ARABIC LIGATURE JEEM WITH MEEM INITIAL FORM ++FCA9 ; mapped ; 062D 062C # 1.1 ARABIC LIGATURE HAH WITH JEEM INITIAL FORM ++FCAA ; mapped ; 062D 0645 # 1.1 ARABIC LIGATURE HAH WITH MEEM INITIAL FORM ++FCAB ; mapped ; 062E 062C # 1.1 ARABIC LIGATURE KHAH WITH JEEM INITIAL FORM ++FCAC ; mapped ; 062E 0645 # 1.1 ARABIC LIGATURE KHAH WITH MEEM INITIAL FORM ++FCAD ; mapped ; 0633 062C # 1.1 ARABIC LIGATURE SEEN WITH JEEM INITIAL FORM ++FCAE ; mapped ; 0633 062D # 1.1 ARABIC LIGATURE SEEN WITH HAH INITIAL FORM ++FCAF ; mapped ; 0633 062E # 1.1 ARABIC LIGATURE SEEN WITH KHAH INITIAL FORM ++FCB0 ; mapped ; 0633 0645 # 1.1 ARABIC LIGATURE SEEN WITH MEEM INITIAL FORM ++FCB1 ; mapped ; 0635 062D # 1.1 ARABIC LIGATURE SAD WITH HAH INITIAL FORM ++FCB2 ; mapped ; 0635 062E # 1.1 ARABIC LIGATURE SAD WITH KHAH INITIAL FORM ++FCB3 ; mapped ; 0635 0645 # 1.1 ARABIC LIGATURE SAD WITH MEEM INITIAL FORM ++FCB4 ; mapped ; 0636 062C # 1.1 ARABIC LIGATURE DAD WITH JEEM INITIAL FORM ++FCB5 ; mapped ; 0636 062D # 1.1 ARABIC LIGATURE DAD WITH HAH INITIAL FORM ++FCB6 ; mapped ; 0636 062E # 1.1 ARABIC LIGATURE DAD WITH KHAH INITIAL FORM ++FCB7 ; mapped ; 0636 0645 # 1.1 ARABIC LIGATURE DAD WITH MEEM INITIAL FORM ++FCB8 ; mapped ; 0637 062D # 1.1 ARABIC LIGATURE TAH WITH HAH INITIAL FORM ++FCB9 ; mapped ; 0638 0645 # 1.1 ARABIC LIGATURE ZAH WITH MEEM INITIAL FORM ++FCBA ; mapped ; 0639 062C # 1.1 ARABIC LIGATURE AIN WITH JEEM INITIAL FORM ++FCBB ; mapped ; 0639 0645 # 1.1 ARABIC LIGATURE AIN WITH MEEM INITIAL FORM ++FCBC ; mapped ; 063A 062C # 1.1 ARABIC LIGATURE GHAIN WITH JEEM 
INITIAL FORM ++FCBD ; mapped ; 063A 0645 # 1.1 ARABIC LIGATURE GHAIN WITH MEEM INITIAL FORM ++FCBE ; mapped ; 0641 062C # 1.1 ARABIC LIGATURE FEH WITH JEEM INITIAL FORM ++FCBF ; mapped ; 0641 062D # 1.1 ARABIC LIGATURE FEH WITH HAH INITIAL FORM ++FCC0 ; mapped ; 0641 062E # 1.1 ARABIC LIGATURE FEH WITH KHAH INITIAL FORM ++FCC1 ; mapped ; 0641 0645 # 1.1 ARABIC LIGATURE FEH WITH MEEM INITIAL FORM ++FCC2 ; mapped ; 0642 062D # 1.1 ARABIC LIGATURE QAF WITH HAH INITIAL FORM ++FCC3 ; mapped ; 0642 0645 # 1.1 ARABIC LIGATURE QAF WITH MEEM INITIAL FORM ++FCC4 ; mapped ; 0643 062C # 1.1 ARABIC LIGATURE KAF WITH JEEM INITIAL FORM ++FCC5 ; mapped ; 0643 062D # 1.1 ARABIC LIGATURE KAF WITH HAH INITIAL FORM ++FCC6 ; mapped ; 0643 062E # 1.1 ARABIC LIGATURE KAF WITH KHAH INITIAL FORM ++FCC7 ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM INITIAL FORM ++FCC8 ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM INITIAL FORM ++FCC9 ; mapped ; 0644 062C # 1.1 ARABIC LIGATURE LAM WITH JEEM INITIAL FORM ++FCCA ; mapped ; 0644 062D # 1.1 ARABIC LIGATURE LAM WITH HAH INITIAL FORM ++FCCB ; mapped ; 0644 062E # 1.1 ARABIC LIGATURE LAM WITH KHAH INITIAL FORM ++FCCC ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH MEEM INITIAL FORM ++FCCD ; mapped ; 0644 0647 # 1.1 ARABIC LIGATURE LAM WITH HEH INITIAL FORM ++FCCE ; mapped ; 0645 062C # 1.1 ARABIC LIGATURE MEEM WITH JEEM INITIAL FORM ++FCCF ; mapped ; 0645 062D # 1.1 ARABIC LIGATURE MEEM WITH HAH INITIAL FORM ++FCD0 ; mapped ; 0645 062E # 1.1 ARABIC LIGATURE MEEM WITH KHAH INITIAL FORM ++FCD1 ; mapped ; 0645 0645 # 1.1 ARABIC LIGATURE MEEM WITH MEEM INITIAL FORM ++FCD2 ; mapped ; 0646 062C # 1.1 ARABIC LIGATURE NOON WITH JEEM INITIAL FORM ++FCD3 ; mapped ; 0646 062D # 1.1 ARABIC LIGATURE NOON WITH HAH INITIAL FORM ++FCD4 ; mapped ; 0646 062E # 1.1 ARABIC LIGATURE NOON WITH KHAH INITIAL FORM ++FCD5 ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM INITIAL FORM ++FCD6 ; mapped ; 0646 0647 # 1.1 ARABIC LIGATURE NOON WITH 
HEH INITIAL FORM ++FCD7 ; mapped ; 0647 062C # 1.1 ARABIC LIGATURE HEH WITH JEEM INITIAL FORM ++FCD8 ; mapped ; 0647 0645 # 1.1 ARABIC LIGATURE HEH WITH MEEM INITIAL FORM ++FCD9 ; mapped ; 0647 0670 # 1.1 ARABIC LIGATURE HEH WITH SUPERSCRIPT ALEF INITIAL FORM ++FCDA ; mapped ; 064A 062C # 1.1 ARABIC LIGATURE YEH WITH JEEM INITIAL FORM ++FCDB ; mapped ; 064A 062D # 1.1 ARABIC LIGATURE YEH WITH HAH INITIAL FORM ++FCDC ; mapped ; 064A 062E # 1.1 ARABIC LIGATURE YEH WITH KHAH INITIAL FORM ++FCDD ; mapped ; 064A 0645 # 1.1 ARABIC LIGATURE YEH WITH MEEM INITIAL FORM ++FCDE ; mapped ; 064A 0647 # 1.1 ARABIC LIGATURE YEH WITH HEH INITIAL FORM ++FCDF ; mapped ; 0626 0645 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH MEEM MEDIAL FORM ++FCE0 ; mapped ; 0626 0647 # 1.1 ARABIC LIGATURE YEH WITH HAMZA ABOVE WITH HEH MEDIAL FORM ++FCE1 ; mapped ; 0628 0645 # 1.1 ARABIC LIGATURE BEH WITH MEEM MEDIAL FORM ++FCE2 ; mapped ; 0628 0647 # 1.1 ARABIC LIGATURE BEH WITH HEH MEDIAL FORM ++FCE3 ; mapped ; 062A 0645 # 1.1 ARABIC LIGATURE TEH WITH MEEM MEDIAL FORM ++FCE4 ; mapped ; 062A 0647 # 1.1 ARABIC LIGATURE TEH WITH HEH MEDIAL FORM ++FCE5 ; mapped ; 062B 0645 # 1.1 ARABIC LIGATURE THEH WITH MEEM MEDIAL FORM ++FCE6 ; mapped ; 062B 0647 # 1.1 ARABIC LIGATURE THEH WITH HEH MEDIAL FORM ++FCE7 ; mapped ; 0633 0645 # 1.1 ARABIC LIGATURE SEEN WITH MEEM MEDIAL FORM ++FCE8 ; mapped ; 0633 0647 # 1.1 ARABIC LIGATURE SEEN WITH HEH MEDIAL FORM ++FCE9 ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM MEDIAL FORM ++FCEA ; mapped ; 0634 0647 # 1.1 ARABIC LIGATURE SHEEN WITH HEH MEDIAL FORM ++FCEB ; mapped ; 0643 0644 # 1.1 ARABIC LIGATURE KAF WITH LAM MEDIAL FORM ++FCEC ; mapped ; 0643 0645 # 1.1 ARABIC LIGATURE KAF WITH MEEM MEDIAL FORM ++FCED ; mapped ; 0644 0645 # 1.1 ARABIC LIGATURE LAM WITH MEEM MEDIAL FORM ++FCEE ; mapped ; 0646 0645 # 1.1 ARABIC LIGATURE NOON WITH MEEM MEDIAL FORM ++FCEF ; mapped ; 0646 0647 # 1.1 ARABIC LIGATURE NOON WITH HEH MEDIAL FORM ++FCF0 ; mapped ; 064A 0645 
# 1.1 ARABIC LIGATURE YEH WITH MEEM MEDIAL FORM ++FCF1 ; mapped ; 064A 0647 # 1.1 ARABIC LIGATURE YEH WITH HEH MEDIAL FORM ++FCF2 ; mapped ; 0640 064E 0651 #1.1 ARABIC LIGATURE SHADDA WITH FATHA MEDIAL FORM ++FCF3 ; mapped ; 0640 064F 0651 #1.1 ARABIC LIGATURE SHADDA WITH DAMMA MEDIAL FORM ++FCF4 ; mapped ; 0640 0650 0651 #1.1 ARABIC LIGATURE SHADDA WITH KASRA MEDIAL FORM ++FCF5 ; mapped ; 0637 0649 # 1.1 ARABIC LIGATURE TAH WITH ALEF MAKSURA ISOLATED FORM ++FCF6 ; mapped ; 0637 064A # 1.1 ARABIC LIGATURE TAH WITH YEH ISOLATED FORM ++FCF7 ; mapped ; 0639 0649 # 1.1 ARABIC LIGATURE AIN WITH ALEF MAKSURA ISOLATED FORM ++FCF8 ; mapped ; 0639 064A # 1.1 ARABIC LIGATURE AIN WITH YEH ISOLATED FORM ++FCF9 ; mapped ; 063A 0649 # 1.1 ARABIC LIGATURE GHAIN WITH ALEF MAKSURA ISOLATED FORM ++FCFA ; mapped ; 063A 064A # 1.1 ARABIC LIGATURE GHAIN WITH YEH ISOLATED FORM ++FCFB ; mapped ; 0633 0649 # 1.1 ARABIC LIGATURE SEEN WITH ALEF MAKSURA ISOLATED FORM ++FCFC ; mapped ; 0633 064A # 1.1 ARABIC LIGATURE SEEN WITH YEH ISOLATED FORM ++FCFD ; mapped ; 0634 0649 # 1.1 ARABIC LIGATURE SHEEN WITH ALEF MAKSURA ISOLATED FORM ++FCFE ; mapped ; 0634 064A # 1.1 ARABIC LIGATURE SHEEN WITH YEH ISOLATED FORM ++FCFF ; mapped ; 062D 0649 # 1.1 ARABIC LIGATURE HAH WITH ALEF MAKSURA ISOLATED FORM ++FD00 ; mapped ; 062D 064A # 1.1 ARABIC LIGATURE HAH WITH YEH ISOLATED FORM ++FD01 ; mapped ; 062C 0649 # 1.1 ARABIC LIGATURE JEEM WITH ALEF MAKSURA ISOLATED FORM ++FD02 ; mapped ; 062C 064A # 1.1 ARABIC LIGATURE JEEM WITH YEH ISOLATED FORM ++FD03 ; mapped ; 062E 0649 # 1.1 ARABIC LIGATURE KHAH WITH ALEF MAKSURA ISOLATED FORM ++FD04 ; mapped ; 062E 064A # 1.1 ARABIC LIGATURE KHAH WITH YEH ISOLATED FORM ++FD05 ; mapped ; 0635 0649 # 1.1 ARABIC LIGATURE SAD WITH ALEF MAKSURA ISOLATED FORM ++FD06 ; mapped ; 0635 064A # 1.1 ARABIC LIGATURE SAD WITH YEH ISOLATED FORM ++FD07 ; mapped ; 0636 0649 # 1.1 ARABIC LIGATURE DAD WITH ALEF MAKSURA ISOLATED FORM ++FD08 ; mapped ; 0636 064A # 1.1 ARABIC LIGATURE DAD 
WITH YEH ISOLATED FORM ++FD09 ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM ISOLATED FORM ++FD0A ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH ISOLATED FORM ++FD0B ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH ISOLATED FORM ++FD0C ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM ISOLATED FORM ++FD0D ; mapped ; 0634 0631 # 1.1 ARABIC LIGATURE SHEEN WITH REH ISOLATED FORM ++FD0E ; mapped ; 0633 0631 # 1.1 ARABIC LIGATURE SEEN WITH REH ISOLATED FORM ++FD0F ; mapped ; 0635 0631 # 1.1 ARABIC LIGATURE SAD WITH REH ISOLATED FORM ++FD10 ; mapped ; 0636 0631 # 1.1 ARABIC LIGATURE DAD WITH REH ISOLATED FORM ++FD11 ; mapped ; 0637 0649 # 1.1 ARABIC LIGATURE TAH WITH ALEF MAKSURA FINAL FORM ++FD12 ; mapped ; 0637 064A # 1.1 ARABIC LIGATURE TAH WITH YEH FINAL FORM ++FD13 ; mapped ; 0639 0649 # 1.1 ARABIC LIGATURE AIN WITH ALEF MAKSURA FINAL FORM ++FD14 ; mapped ; 0639 064A # 1.1 ARABIC LIGATURE AIN WITH YEH FINAL FORM ++FD15 ; mapped ; 063A 0649 # 1.1 ARABIC LIGATURE GHAIN WITH ALEF MAKSURA FINAL FORM ++FD16 ; mapped ; 063A 064A # 1.1 ARABIC LIGATURE GHAIN WITH YEH FINAL FORM ++FD17 ; mapped ; 0633 0649 # 1.1 ARABIC LIGATURE SEEN WITH ALEF MAKSURA FINAL FORM ++FD18 ; mapped ; 0633 064A # 1.1 ARABIC LIGATURE SEEN WITH YEH FINAL FORM ++FD19 ; mapped ; 0634 0649 # 1.1 ARABIC LIGATURE SHEEN WITH ALEF MAKSURA FINAL FORM ++FD1A ; mapped ; 0634 064A # 1.1 ARABIC LIGATURE SHEEN WITH YEH FINAL FORM ++FD1B ; mapped ; 062D 0649 # 1.1 ARABIC LIGATURE HAH WITH ALEF MAKSURA FINAL FORM ++FD1C ; mapped ; 062D 064A # 1.1 ARABIC LIGATURE HAH WITH YEH FINAL FORM ++FD1D ; mapped ; 062C 0649 # 1.1 ARABIC LIGATURE JEEM WITH ALEF MAKSURA FINAL FORM ++FD1E ; mapped ; 062C 064A # 1.1 ARABIC LIGATURE JEEM WITH YEH FINAL FORM ++FD1F ; mapped ; 062E 0649 # 1.1 ARABIC LIGATURE KHAH WITH ALEF MAKSURA FINAL FORM ++FD20 ; mapped ; 062E 064A # 1.1 ARABIC LIGATURE KHAH WITH YEH FINAL FORM ++FD21 ; mapped ; 0635 0649 # 1.1 ARABIC LIGATURE SAD WITH ALEF MAKSURA 
FINAL FORM ++FD22 ; mapped ; 0635 064A # 1.1 ARABIC LIGATURE SAD WITH YEH FINAL FORM ++FD23 ; mapped ; 0636 0649 # 1.1 ARABIC LIGATURE DAD WITH ALEF MAKSURA FINAL FORM ++FD24 ; mapped ; 0636 064A # 1.1 ARABIC LIGATURE DAD WITH YEH FINAL FORM ++FD25 ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM FINAL FORM ++FD26 ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH FINAL FORM ++FD27 ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH FINAL FORM ++FD28 ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM FINAL FORM ++FD29 ; mapped ; 0634 0631 # 1.1 ARABIC LIGATURE SHEEN WITH REH FINAL FORM ++FD2A ; mapped ; 0633 0631 # 1.1 ARABIC LIGATURE SEEN WITH REH FINAL FORM ++FD2B ; mapped ; 0635 0631 # 1.1 ARABIC LIGATURE SAD WITH REH FINAL FORM ++FD2C ; mapped ; 0636 0631 # 1.1 ARABIC LIGATURE DAD WITH REH FINAL FORM ++FD2D ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM INITIAL FORM ++FD2E ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH INITIAL FORM ++FD2F ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH INITIAL FORM ++FD30 ; mapped ; 0634 0645 # 1.1 ARABIC LIGATURE SHEEN WITH MEEM INITIAL FORM ++FD31 ; mapped ; 0633 0647 # 1.1 ARABIC LIGATURE SEEN WITH HEH INITIAL FORM ++FD32 ; mapped ; 0634 0647 # 1.1 ARABIC LIGATURE SHEEN WITH HEH INITIAL FORM ++FD33 ; mapped ; 0637 0645 # 1.1 ARABIC LIGATURE TAH WITH MEEM INITIAL FORM ++FD34 ; mapped ; 0633 062C # 1.1 ARABIC LIGATURE SEEN WITH JEEM MEDIAL FORM ++FD35 ; mapped ; 0633 062D # 1.1 ARABIC LIGATURE SEEN WITH HAH MEDIAL FORM ++FD36 ; mapped ; 0633 062E # 1.1 ARABIC LIGATURE SEEN WITH KHAH MEDIAL FORM ++FD37 ; mapped ; 0634 062C # 1.1 ARABIC LIGATURE SHEEN WITH JEEM MEDIAL FORM ++FD38 ; mapped ; 0634 062D # 1.1 ARABIC LIGATURE SHEEN WITH HAH MEDIAL FORM ++FD39 ; mapped ; 0634 062E # 1.1 ARABIC LIGATURE SHEEN WITH KHAH MEDIAL FORM ++FD3A ; mapped ; 0637 0645 # 1.1 ARABIC LIGATURE TAH WITH MEEM MEDIAL FORM ++FD3B ; mapped ; 0638 0645 # 1.1 ARABIC LIGATURE ZAH WITH MEEM 
MEDIAL FORM ++FD3C..FD3D ; mapped ; 0627 064B # 1.1 ARABIC LIGATURE ALEF WITH FATHATAN FINAL FORM..ARABIC LIGATURE ALEF WITH FATHATAN ISOLATED FORM ++FD3E..FD3F ; valid ; ; NV8 # 1.1 ORNATE LEFT PARENTHESIS..ORNATE RIGHT PARENTHESIS ++FD40..FD4F ; disallowed # NA .. ++FD50 ; mapped ; 062A 062C 0645 #1.1 ARABIC LIGATURE TEH WITH JEEM WITH MEEM INITIAL FORM ++FD51..FD52 ; mapped ; 062A 062D 062C #1.1 ARABIC LIGATURE TEH WITH HAH WITH JEEM FINAL FORM..ARABIC LIGATURE TEH WITH HAH WITH JEEM INITIAL FORM ++FD53 ; mapped ; 062A 062D 0645 #1.1 ARABIC LIGATURE TEH WITH HAH WITH MEEM INITIAL FORM ++FD54 ; mapped ; 062A 062E 0645 #1.1 ARABIC LIGATURE TEH WITH KHAH WITH MEEM INITIAL FORM ++FD55 ; mapped ; 062A 0645 062C #1.1 ARABIC LIGATURE TEH WITH MEEM WITH JEEM INITIAL FORM ++FD56 ; mapped ; 062A 0645 062D #1.1 ARABIC LIGATURE TEH WITH MEEM WITH HAH INITIAL FORM ++FD57 ; mapped ; 062A 0645 062E #1.1 ARABIC LIGATURE TEH WITH MEEM WITH KHAH INITIAL FORM ++FD58..FD59 ; mapped ; 062C 0645 062D #1.1 ARABIC LIGATURE JEEM WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE JEEM WITH MEEM WITH HAH INITIAL FORM ++FD5A ; mapped ; 062D 0645 064A #1.1 ARABIC LIGATURE HAH WITH MEEM WITH YEH FINAL FORM ++FD5B ; mapped ; 062D 0645 0649 #1.1 ARABIC LIGATURE HAH WITH MEEM WITH ALEF MAKSURA FINAL FORM ++FD5C ; mapped ; 0633 062D 062C #1.1 ARABIC LIGATURE SEEN WITH HAH WITH JEEM INITIAL FORM ++FD5D ; mapped ; 0633 062C 062D #1.1 ARABIC LIGATURE SEEN WITH JEEM WITH HAH INITIAL FORM ++FD5E ; mapped ; 0633 062C 0649 #1.1 ARABIC LIGATURE SEEN WITH JEEM WITH ALEF MAKSURA FINAL FORM ++FD5F..FD60 ; mapped ; 0633 0645 062D #1.1 ARABIC LIGATURE SEEN WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE SEEN WITH MEEM WITH HAH INITIAL FORM ++FD61 ; mapped ; 0633 0645 062C #1.1 ARABIC LIGATURE SEEN WITH MEEM WITH JEEM INITIAL FORM ++FD62..FD63 ; mapped ; 0633 0645 0645 #1.1 ARABIC LIGATURE SEEN WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE SEEN WITH MEEM WITH MEEM INITIAL FORM ++FD64..FD65 ; mapped ; 0635 062D 
062D #1.1 ARABIC LIGATURE SAD WITH HAH WITH HAH FINAL FORM..ARABIC LIGATURE SAD WITH HAH WITH HAH INITIAL FORM ++FD66 ; mapped ; 0635 0645 0645 #1.1 ARABIC LIGATURE SAD WITH MEEM WITH MEEM FINAL FORM ++FD67..FD68 ; mapped ; 0634 062D 0645 #1.1 ARABIC LIGATURE SHEEN WITH HAH WITH MEEM FINAL FORM..ARABIC LIGATURE SHEEN WITH HAH WITH MEEM INITIAL FORM ++FD69 ; mapped ; 0634 062C 064A #1.1 ARABIC LIGATURE SHEEN WITH JEEM WITH YEH FINAL FORM ++FD6A..FD6B ; mapped ; 0634 0645 062E #1.1 ARABIC LIGATURE SHEEN WITH MEEM WITH KHAH FINAL FORM..ARABIC LIGATURE SHEEN WITH MEEM WITH KHAH INITIAL FORM ++FD6C..FD6D ; mapped ; 0634 0645 0645 #1.1 ARABIC LIGATURE SHEEN WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE SHEEN WITH MEEM WITH MEEM INITIAL FORM ++FD6E ; mapped ; 0636 062D 0649 #1.1 ARABIC LIGATURE DAD WITH HAH WITH ALEF MAKSURA FINAL FORM ++FD6F..FD70 ; mapped ; 0636 062E 0645 #1.1 ARABIC LIGATURE DAD WITH KHAH WITH MEEM FINAL FORM..ARABIC LIGATURE DAD WITH KHAH WITH MEEM INITIAL FORM ++FD71..FD72 ; mapped ; 0637 0645 062D #1.1 ARABIC LIGATURE TAH WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE TAH WITH MEEM WITH HAH INITIAL FORM ++FD73 ; mapped ; 0637 0645 0645 #1.1 ARABIC LIGATURE TAH WITH MEEM WITH MEEM INITIAL FORM ++FD74 ; mapped ; 0637 0645 064A #1.1 ARABIC LIGATURE TAH WITH MEEM WITH YEH FINAL FORM ++FD75 ; mapped ; 0639 062C 0645 #1.1 ARABIC LIGATURE AIN WITH JEEM WITH MEEM FINAL FORM ++FD76..FD77 ; mapped ; 0639 0645 0645 #1.1 ARABIC LIGATURE AIN WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE AIN WITH MEEM WITH MEEM INITIAL FORM ++FD78 ; mapped ; 0639 0645 0649 #1.1 ARABIC LIGATURE AIN WITH MEEM WITH ALEF MAKSURA FINAL FORM ++FD79 ; mapped ; 063A 0645 0645 #1.1 ARABIC LIGATURE GHAIN WITH MEEM WITH MEEM FINAL FORM ++FD7A ; mapped ; 063A 0645 064A #1.1 ARABIC LIGATURE GHAIN WITH MEEM WITH YEH FINAL FORM ++FD7B ; mapped ; 063A 0645 0649 #1.1 ARABIC LIGATURE GHAIN WITH MEEM WITH ALEF MAKSURA FINAL FORM ++FD7C..FD7D ; mapped ; 0641 062E 0645 #1.1 ARABIC LIGATURE FEH 
WITH KHAH WITH MEEM FINAL FORM..ARABIC LIGATURE FEH WITH KHAH WITH MEEM INITIAL FORM ++FD7E ; mapped ; 0642 0645 062D #1.1 ARABIC LIGATURE QAF WITH MEEM WITH HAH FINAL FORM ++FD7F ; mapped ; 0642 0645 0645 #1.1 ARABIC LIGATURE QAF WITH MEEM WITH MEEM FINAL FORM ++FD80 ; mapped ; 0644 062D 0645 #1.1 ARABIC LIGATURE LAM WITH HAH WITH MEEM FINAL FORM ++FD81 ; mapped ; 0644 062D 064A #1.1 ARABIC LIGATURE LAM WITH HAH WITH YEH FINAL FORM ++FD82 ; mapped ; 0644 062D 0649 #1.1 ARABIC LIGATURE LAM WITH HAH WITH ALEF MAKSURA FINAL FORM ++FD83..FD84 ; mapped ; 0644 062C 062C #1.1 ARABIC LIGATURE LAM WITH JEEM WITH JEEM INITIAL FORM..ARABIC LIGATURE LAM WITH JEEM WITH JEEM FINAL FORM ++FD85..FD86 ; mapped ; 0644 062E 0645 #1.1 ARABIC LIGATURE LAM WITH KHAH WITH MEEM FINAL FORM..ARABIC LIGATURE LAM WITH KHAH WITH MEEM INITIAL FORM ++FD87..FD88 ; mapped ; 0644 0645 062D #1.1 ARABIC LIGATURE LAM WITH MEEM WITH HAH FINAL FORM..ARABIC LIGATURE LAM WITH MEEM WITH HAH INITIAL FORM ++FD89 ; mapped ; 0645 062D 062C #1.1 ARABIC LIGATURE MEEM WITH HAH WITH JEEM INITIAL FORM ++FD8A ; mapped ; 0645 062D 0645 #1.1 ARABIC LIGATURE MEEM WITH HAH WITH MEEM INITIAL FORM ++FD8B ; mapped ; 0645 062D 064A #1.1 ARABIC LIGATURE MEEM WITH HAH WITH YEH FINAL FORM ++FD8C ; mapped ; 0645 062C 062D #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH HAH INITIAL FORM ++FD8D ; mapped ; 0645 062C 0645 #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH MEEM INITIAL FORM ++FD8E ; mapped ; 0645 062E 062C #1.1 ARABIC LIGATURE MEEM WITH KHAH WITH JEEM INITIAL FORM ++FD8F ; mapped ; 0645 062E 0645 #1.1 ARABIC LIGATURE MEEM WITH KHAH WITH MEEM INITIAL FORM ++FD90..FD91 ; disallowed # NA .. 
++FD92 ; mapped ; 0645 062C 062E #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH KHAH INITIAL FORM ++FD93 ; mapped ; 0647 0645 062C #1.1 ARABIC LIGATURE HEH WITH MEEM WITH JEEM INITIAL FORM ++FD94 ; mapped ; 0647 0645 0645 #1.1 ARABIC LIGATURE HEH WITH MEEM WITH MEEM INITIAL FORM ++FD95 ; mapped ; 0646 062D 0645 #1.1 ARABIC LIGATURE NOON WITH HAH WITH MEEM INITIAL FORM ++FD96 ; mapped ; 0646 062D 0649 #1.1 ARABIC LIGATURE NOON WITH HAH WITH ALEF MAKSURA FINAL FORM ++FD97..FD98 ; mapped ; 0646 062C 0645 #1.1 ARABIC LIGATURE NOON WITH JEEM WITH MEEM FINAL FORM..ARABIC LIGATURE NOON WITH JEEM WITH MEEM INITIAL FORM ++FD99 ; mapped ; 0646 062C 0649 #1.1 ARABIC LIGATURE NOON WITH JEEM WITH ALEF MAKSURA FINAL FORM ++FD9A ; mapped ; 0646 0645 064A #1.1 ARABIC LIGATURE NOON WITH MEEM WITH YEH FINAL FORM ++FD9B ; mapped ; 0646 0645 0649 #1.1 ARABIC LIGATURE NOON WITH MEEM WITH ALEF MAKSURA FINAL FORM ++FD9C..FD9D ; mapped ; 064A 0645 0645 #1.1 ARABIC LIGATURE YEH WITH MEEM WITH MEEM FINAL FORM..ARABIC LIGATURE YEH WITH MEEM WITH MEEM INITIAL FORM ++FD9E ; mapped ; 0628 062E 064A #1.1 ARABIC LIGATURE BEH WITH KHAH WITH YEH FINAL FORM ++FD9F ; mapped ; 062A 062C 064A #1.1 ARABIC LIGATURE TEH WITH JEEM WITH YEH FINAL FORM ++FDA0 ; mapped ; 062A 062C 0649 #1.1 ARABIC LIGATURE TEH WITH JEEM WITH ALEF MAKSURA FINAL FORM ++FDA1 ; mapped ; 062A 062E 064A #1.1 ARABIC LIGATURE TEH WITH KHAH WITH YEH FINAL FORM ++FDA2 ; mapped ; 062A 062E 0649 #1.1 ARABIC LIGATURE TEH WITH KHAH WITH ALEF MAKSURA FINAL FORM ++FDA3 ; mapped ; 062A 0645 064A #1.1 ARABIC LIGATURE TEH WITH MEEM WITH YEH FINAL FORM ++FDA4 ; mapped ; 062A 0645 0649 #1.1 ARABIC LIGATURE TEH WITH MEEM WITH ALEF MAKSURA FINAL FORM ++FDA5 ; mapped ; 062C 0645 064A #1.1 ARABIC LIGATURE JEEM WITH MEEM WITH YEH FINAL FORM ++FDA6 ; mapped ; 062C 062D 0649 #1.1 ARABIC LIGATURE JEEM WITH HAH WITH ALEF MAKSURA FINAL FORM ++FDA7 ; mapped ; 062C 0645 0649 #1.1 ARABIC LIGATURE JEEM WITH MEEM WITH ALEF MAKSURA FINAL FORM ++FDA8 ; mapped ; 0633 
062E 0649 #1.1 ARABIC LIGATURE SEEN WITH KHAH WITH ALEF MAKSURA FINAL FORM ++FDA9 ; mapped ; 0635 062D 064A #1.1 ARABIC LIGATURE SAD WITH HAH WITH YEH FINAL FORM ++FDAA ; mapped ; 0634 062D 064A #1.1 ARABIC LIGATURE SHEEN WITH HAH WITH YEH FINAL FORM ++FDAB ; mapped ; 0636 062D 064A #1.1 ARABIC LIGATURE DAD WITH HAH WITH YEH FINAL FORM ++FDAC ; mapped ; 0644 062C 064A #1.1 ARABIC LIGATURE LAM WITH JEEM WITH YEH FINAL FORM ++FDAD ; mapped ; 0644 0645 064A #1.1 ARABIC LIGATURE LAM WITH MEEM WITH YEH FINAL FORM ++FDAE ; mapped ; 064A 062D 064A #1.1 ARABIC LIGATURE YEH WITH HAH WITH YEH FINAL FORM ++FDAF ; mapped ; 064A 062C 064A #1.1 ARABIC LIGATURE YEH WITH JEEM WITH YEH FINAL FORM ++FDB0 ; mapped ; 064A 0645 064A #1.1 ARABIC LIGATURE YEH WITH MEEM WITH YEH FINAL FORM ++FDB1 ; mapped ; 0645 0645 064A #1.1 ARABIC LIGATURE MEEM WITH MEEM WITH YEH FINAL FORM ++FDB2 ; mapped ; 0642 0645 064A #1.1 ARABIC LIGATURE QAF WITH MEEM WITH YEH FINAL FORM ++FDB3 ; mapped ; 0646 062D 064A #1.1 ARABIC LIGATURE NOON WITH HAH WITH YEH FINAL FORM ++FDB4 ; mapped ; 0642 0645 062D #1.1 ARABIC LIGATURE QAF WITH MEEM WITH HAH INITIAL FORM ++FDB5 ; mapped ; 0644 062D 0645 #1.1 ARABIC LIGATURE LAM WITH HAH WITH MEEM INITIAL FORM ++FDB6 ; mapped ; 0639 0645 064A #1.1 ARABIC LIGATURE AIN WITH MEEM WITH YEH FINAL FORM ++FDB7 ; mapped ; 0643 0645 064A #1.1 ARABIC LIGATURE KAF WITH MEEM WITH YEH FINAL FORM ++FDB8 ; mapped ; 0646 062C 062D #1.1 ARABIC LIGATURE NOON WITH JEEM WITH HAH INITIAL FORM ++FDB9 ; mapped ; 0645 062E 064A #1.1 ARABIC LIGATURE MEEM WITH KHAH WITH YEH FINAL FORM ++FDBA ; mapped ; 0644 062C 0645 #1.1 ARABIC LIGATURE LAM WITH JEEM WITH MEEM INITIAL FORM ++FDBB ; mapped ; 0643 0645 0645 #1.1 ARABIC LIGATURE KAF WITH MEEM WITH MEEM FINAL FORM ++FDBC ; mapped ; 0644 062C 0645 #1.1 ARABIC LIGATURE LAM WITH JEEM WITH MEEM FINAL FORM ++FDBD ; mapped ; 0646 062C 062D #1.1 ARABIC LIGATURE NOON WITH JEEM WITH HAH FINAL FORM ++FDBE ; mapped ; 062C 062D 064A #1.1 ARABIC LIGATURE JEEM WITH 
HAH WITH YEH FINAL FORM ++FDBF ; mapped ; 062D 062C 064A #1.1 ARABIC LIGATURE HAH WITH JEEM WITH YEH FINAL FORM ++FDC0 ; mapped ; 0645 062C 064A #1.1 ARABIC LIGATURE MEEM WITH JEEM WITH YEH FINAL FORM ++FDC1 ; mapped ; 0641 0645 064A #1.1 ARABIC LIGATURE FEH WITH MEEM WITH YEH FINAL FORM ++FDC2 ; mapped ; 0628 062D 064A #1.1 ARABIC LIGATURE BEH WITH HAH WITH YEH FINAL FORM ++FDC3 ; mapped ; 0643 0645 0645 #1.1 ARABIC LIGATURE KAF WITH MEEM WITH MEEM INITIAL FORM ++FDC4 ; mapped ; 0639 062C 0645 #1.1 ARABIC LIGATURE AIN WITH JEEM WITH MEEM INITIAL FORM ++FDC5 ; mapped ; 0635 0645 0645 #1.1 ARABIC LIGATURE SAD WITH MEEM WITH MEEM INITIAL FORM ++FDC6 ; mapped ; 0633 062E 064A #1.1 ARABIC LIGATURE SEEN WITH KHAH WITH YEH FINAL FORM ++FDC7 ; mapped ; 0646 062C 064A #1.1 ARABIC LIGATURE NOON WITH JEEM WITH YEH FINAL FORM ++FDC8..FDCF ; disallowed # NA .. ++FDD0..FDEF ; disallowed # 3.1 .. ++FDF0 ; mapped ; 0635 0644 06D2 #1.1 ARABIC LIGATURE SALLA USED AS KORANIC STOP SIGN ISOLATED FORM ++FDF1 ; mapped ; 0642 0644 06D2 #1.1 ARABIC LIGATURE QALA USED AS KORANIC STOP SIGN ISOLATED FORM ++FDF2 ; mapped ; 0627 0644 0644 0647 #1.1 ARABIC LIGATURE ALLAH ISOLATED FORM ++FDF3 ; mapped ; 0627 0643 0628 0631 #1.1 ARABIC LIGATURE AKBAR ISOLATED FORM ++FDF4 ; mapped ; 0645 062D 0645 062F #1.1 ARABIC LIGATURE MOHAMMAD ISOLATED FORM ++FDF5 ; mapped ; 0635 0644 0639 0645 #1.1 ARABIC LIGATURE SALAM ISOLATED FORM ++FDF6 ; mapped ; 0631 0633 0648 0644 #1.1 ARABIC LIGATURE RASOUL ISOLATED FORM ++FDF7 ; mapped ; 0639 0644 064A 0647 #1.1 ARABIC LIGATURE ALAYHE ISOLATED FORM ++FDF8 ; mapped ; 0648 0633 0644 0645 #1.1 ARABIC LIGATURE WASALLAM ISOLATED FORM ++FDF9 ; mapped ; 0635 0644 0649 #1.1 ARABIC LIGATURE SALLA ISOLATED FORM ++FDFA ; disallowed_STD3_mapped ; 0635 0644 0649 0020 0627 0644 0644 0647 0020 0639 0644 064A 0647 0020 0648 0633 0644 0645 #1.1 ARABIC LIGATURE SALLALLAHOU ALAYHE WASALLAM ++FDFB ; disallowed_STD3_mapped ; 062C 0644 0020 062C 0644 0627 0644 0647 #1.1 ARABIC LIGATURE 
JALLAJALALOUHOU ++FDFC ; mapped ; 0631 06CC 0627 0644 #3.2 RIAL SIGN ++FDFD ; valid ; ; NV8 # 4.0 ARABIC LIGATURE BISMILLAH AR-RAHMAN AR-RAHEEM ++FDFE..FDFF ; disallowed # NA .. ++FE00..FE0F ; ignored # 3.2 VARIATION SELECTOR-1..VARIATION SELECTOR-16 ++FE10 ; disallowed_STD3_mapped ; 002C # 4.1 PRESENTATION FORM FOR VERTICAL COMMA ++FE11 ; mapped ; 3001 # 4.1 PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC COMMA ++FE12 ; disallowed # 4.1 PRESENTATION FORM FOR VERTICAL IDEOGRAPHIC FULL STOP ++FE13 ; disallowed_STD3_mapped ; 003A # 4.1 PRESENTATION FORM FOR VERTICAL COLON ++FE14 ; disallowed_STD3_mapped ; 003B # 4.1 PRESENTATION FORM FOR VERTICAL SEMICOLON ++FE15 ; disallowed_STD3_mapped ; 0021 # 4.1 PRESENTATION FORM FOR VERTICAL EXCLAMATION MARK ++FE16 ; disallowed_STD3_mapped ; 003F # 4.1 PRESENTATION FORM FOR VERTICAL QUESTION MARK ++FE17 ; mapped ; 3016 # 4.1 PRESENTATION FORM FOR VERTICAL LEFT WHITE LENTICULAR BRACKET ++FE18 ; mapped ; 3017 # 4.1 PRESENTATION FORM FOR VERTICAL RIGHT WHITE LENTICULAR BRAKCET ++FE19 ; disallowed # 4.1 PRESENTATION FORM FOR VERTICAL HORIZONTAL ELLIPSIS ++FE1A..FE1F ; disallowed # NA .. 
++FE20..FE23 ; valid # 1.1 COMBINING LIGATURE LEFT HALF..COMBINING DOUBLE TILDE RIGHT HALF ++FE24..FE26 ; valid # 5.1 COMBINING MACRON LEFT HALF..COMBINING CONJOINING MACRON ++FE27..FE2D ; valid # 7.0 COMBINING LIGATURE LEFT HALF BELOW..COMBINING CONJOINING MACRON BELOW ++FE2E..FE2F ; valid # 8.0 COMBINING CYRILLIC TITLO LEFT HALF..COMBINING CYRILLIC TITLO RIGHT HALF ++FE30 ; disallowed # 1.1 PRESENTATION FORM FOR VERTICAL TWO DOT LEADER ++FE31 ; mapped ; 2014 # 1.1 PRESENTATION FORM FOR VERTICAL EM DASH ++FE32 ; mapped ; 2013 # 1.1 PRESENTATION FORM FOR VERTICAL EN DASH ++FE33..FE34 ; disallowed_STD3_mapped ; 005F # 1.1 PRESENTATION FORM FOR VERTICAL LOW LINE..PRESENTATION FORM FOR VERTICAL WAVY LOW LINE ++FE35 ; disallowed_STD3_mapped ; 0028 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT PARENTHESIS ++FE36 ; disallowed_STD3_mapped ; 0029 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT PARENTHESIS ++FE37 ; disallowed_STD3_mapped ; 007B # 1.1 PRESENTATION FORM FOR VERTICAL LEFT CURLY BRACKET ++FE38 ; disallowed_STD3_mapped ; 007D # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT CURLY BRACKET ++FE39 ; mapped ; 3014 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT TORTOISE SHELL BRACKET ++FE3A ; mapped ; 3015 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT TORTOISE SHELL BRACKET ++FE3B ; mapped ; 3010 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT BLACK LENTICULAR BRACKET ++FE3C ; mapped ; 3011 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT BLACK LENTICULAR BRACKET ++FE3D ; mapped ; 300A # 1.1 PRESENTATION FORM FOR VERTICAL LEFT DOUBLE ANGLE BRACKET ++FE3E ; mapped ; 300B # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT DOUBLE ANGLE BRACKET ++FE3F ; mapped ; 3008 # 1.1 PRESENTATION FORM FOR VERTICAL LEFT ANGLE BRACKET ++FE40 ; mapped ; 3009 # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT ANGLE BRACKET ++FE41 ; mapped ; 300C # 1.1 PRESENTATION FORM FOR VERTICAL LEFT CORNER BRACKET ++FE42 ; mapped ; 300D # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT CORNER BRACKET ++FE43 ; mapped ; 300E # 1.1 PRESENTATION FORM FOR 
VERTICAL LEFT WHITE CORNER BRACKET ++FE44 ; mapped ; 300F # 1.1 PRESENTATION FORM FOR VERTICAL RIGHT WHITE CORNER BRACKET ++FE45..FE46 ; valid ; ; NV8 # 3.2 SESAME DOT..WHITE SESAME DOT ++FE47 ; disallowed_STD3_mapped ; 005B # 4.0 PRESENTATION FORM FOR VERTICAL LEFT SQUARE BRACKET ++FE48 ; disallowed_STD3_mapped ; 005D # 4.0 PRESENTATION FORM FOR VERTICAL RIGHT SQUARE BRACKET ++FE49..FE4C ; disallowed_STD3_mapped ; 0020 0305 # 1.1 DASHED OVERLINE..DOUBLE WAVY OVERLINE ++FE4D..FE4F ; disallowed_STD3_mapped ; 005F # 1.1 DASHED LOW LINE..WAVY LOW LINE ++FE50 ; disallowed_STD3_mapped ; 002C # 1.1 SMALL COMMA ++FE51 ; mapped ; 3001 # 1.1 SMALL IDEOGRAPHIC COMMA ++FE52 ; disallowed # 1.1 SMALL FULL STOP ++FE53 ; disallowed # NA ++FE54 ; disallowed_STD3_mapped ; 003B # 1.1 SMALL SEMICOLON ++FE55 ; disallowed_STD3_mapped ; 003A # 1.1 SMALL COLON ++FE56 ; disallowed_STD3_mapped ; 003F # 1.1 SMALL QUESTION MARK ++FE57 ; disallowed_STD3_mapped ; 0021 # 1.1 SMALL EXCLAMATION MARK ++FE58 ; mapped ; 2014 # 1.1 SMALL EM DASH ++FE59 ; disallowed_STD3_mapped ; 0028 # 1.1 SMALL LEFT PARENTHESIS ++FE5A ; disallowed_STD3_mapped ; 0029 # 1.1 SMALL RIGHT PARENTHESIS ++FE5B ; disallowed_STD3_mapped ; 007B # 1.1 SMALL LEFT CURLY BRACKET ++FE5C ; disallowed_STD3_mapped ; 007D # 1.1 SMALL RIGHT CURLY BRACKET ++FE5D ; mapped ; 3014 # 1.1 SMALL LEFT TORTOISE SHELL BRACKET ++FE5E ; mapped ; 3015 # 1.1 SMALL RIGHT TORTOISE SHELL BRACKET ++FE5F ; disallowed_STD3_mapped ; 0023 # 1.1 SMALL NUMBER SIGN ++FE60 ; disallowed_STD3_mapped ; 0026 # 1.1 SMALL AMPERSAND ++FE61 ; disallowed_STD3_mapped ; 002A # 1.1 SMALL ASTERISK ++FE62 ; disallowed_STD3_mapped ; 002B # 1.1 SMALL PLUS SIGN ++FE63 ; mapped ; 002D # 1.1 SMALL HYPHEN-MINUS ++FE64 ; disallowed_STD3_mapped ; 003C # 1.1 SMALL LESS-THAN SIGN ++FE65 ; disallowed_STD3_mapped ; 003E # 1.1 SMALL GREATER-THAN SIGN ++FE66 ; disallowed_STD3_mapped ; 003D # 1.1 SMALL EQUALS SIGN ++FE67 ; disallowed # NA ++FE68 ; disallowed_STD3_mapped ; 005C # 1.1 SMALL 
REVERSE SOLIDUS ++FE69 ; disallowed_STD3_mapped ; 0024 # 1.1 SMALL DOLLAR SIGN ++FE6A ; disallowed_STD3_mapped ; 0025 # 1.1 SMALL PERCENT SIGN ++FE6B ; disallowed_STD3_mapped ; 0040 # 1.1 SMALL COMMERCIAL AT ++FE6C..FE6F ; disallowed # NA .. ++FE70 ; disallowed_STD3_mapped ; 0020 064B # 1.1 ARABIC FATHATAN ISOLATED FORM ++FE71 ; mapped ; 0640 064B # 1.1 ARABIC TATWEEL WITH FATHATAN ABOVE ++FE72 ; disallowed_STD3_mapped ; 0020 064C # 1.1 ARABIC DAMMATAN ISOLATED FORM ++FE73 ; valid # 3.2 ARABIC TAIL FRAGMENT ++FE74 ; disallowed_STD3_mapped ; 0020 064D # 1.1 ARABIC KASRATAN ISOLATED FORM ++FE75 ; disallowed # NA ++FE76 ; disallowed_STD3_mapped ; 0020 064E # 1.1 ARABIC FATHA ISOLATED FORM ++FE77 ; mapped ; 0640 064E # 1.1 ARABIC FATHA MEDIAL FORM ++FE78 ; disallowed_STD3_mapped ; 0020 064F # 1.1 ARABIC DAMMA ISOLATED FORM ++FE79 ; mapped ; 0640 064F # 1.1 ARABIC DAMMA MEDIAL FORM ++FE7A ; disallowed_STD3_mapped ; 0020 0650 # 1.1 ARABIC KASRA ISOLATED FORM ++FE7B ; mapped ; 0640 0650 # 1.1 ARABIC KASRA MEDIAL FORM ++FE7C ; disallowed_STD3_mapped ; 0020 0651 # 1.1 ARABIC SHADDA ISOLATED FORM ++FE7D ; mapped ; 0640 0651 # 1.1 ARABIC SHADDA MEDIAL FORM ++FE7E ; disallowed_STD3_mapped ; 0020 0652 # 1.1 ARABIC SUKUN ISOLATED FORM ++FE7F ; mapped ; 0640 0652 # 1.1 ARABIC SUKUN MEDIAL FORM ++FE80 ; mapped ; 0621 # 1.1 ARABIC LETTER HAMZA ISOLATED FORM ++FE81..FE82 ; mapped ; 0622 # 1.1 ARABIC LETTER ALEF WITH MADDA ABOVE ISOLATED FORM..ARABIC LETTER ALEF WITH MADDA ABOVE FINAL FORM ++FE83..FE84 ; mapped ; 0623 # 1.1 ARABIC LETTER ALEF WITH HAMZA ABOVE ISOLATED FORM..ARABIC LETTER ALEF WITH HAMZA ABOVE FINAL FORM ++FE85..FE86 ; mapped ; 0624 # 1.1 ARABIC LETTER WAW WITH HAMZA ABOVE ISOLATED FORM..ARABIC LETTER WAW WITH HAMZA ABOVE FINAL FORM ++FE87..FE88 ; mapped ; 0625 # 1.1 ARABIC LETTER ALEF WITH HAMZA BELOW ISOLATED FORM..ARABIC LETTER ALEF WITH HAMZA BELOW FINAL FORM ++FE89..FE8C ; mapped ; 0626 # 1.1 ARABIC LETTER YEH WITH HAMZA ABOVE ISOLATED FORM..ARABIC LETTER YEH 
WITH HAMZA ABOVE MEDIAL FORM ++FE8D..FE8E ; mapped ; 0627 # 1.1 ARABIC LETTER ALEF ISOLATED FORM..ARABIC LETTER ALEF FINAL FORM ++FE8F..FE92 ; mapped ; 0628 # 1.1 ARABIC LETTER BEH ISOLATED FORM..ARABIC LETTER BEH MEDIAL FORM ++FE93..FE94 ; mapped ; 0629 # 1.1 ARABIC LETTER TEH MARBUTA ISOLATED FORM..ARABIC LETTER TEH MARBUTA FINAL FORM ++FE95..FE98 ; mapped ; 062A # 1.1 ARABIC LETTER TEH ISOLATED FORM..ARABIC LETTER TEH MEDIAL FORM ++FE99..FE9C ; mapped ; 062B # 1.1 ARABIC LETTER THEH ISOLATED FORM..ARABIC LETTER THEH MEDIAL FORM ++FE9D..FEA0 ; mapped ; 062C # 1.1 ARABIC LETTER JEEM ISOLATED FORM..ARABIC LETTER JEEM MEDIAL FORM ++FEA1..FEA4 ; mapped ; 062D # 1.1 ARABIC LETTER HAH ISOLATED FORM..ARABIC LETTER HAH MEDIAL FORM ++FEA5..FEA8 ; mapped ; 062E # 1.1 ARABIC LETTER KHAH ISOLATED FORM..ARABIC LETTER KHAH MEDIAL FORM ++FEA9..FEAA ; mapped ; 062F # 1.1 ARABIC LETTER DAL ISOLATED FORM..ARABIC LETTER DAL FINAL FORM ++FEAB..FEAC ; mapped ; 0630 # 1.1 ARABIC LETTER THAL ISOLATED FORM..ARABIC LETTER THAL FINAL FORM ++FEAD..FEAE ; mapped ; 0631 # 1.1 ARABIC LETTER REH ISOLATED FORM..ARABIC LETTER REH FINAL FORM ++FEAF..FEB0 ; mapped ; 0632 # 1.1 ARABIC LETTER ZAIN ISOLATED FORM..ARABIC LETTER ZAIN FINAL FORM ++FEB1..FEB4 ; mapped ; 0633 # 1.1 ARABIC LETTER SEEN ISOLATED FORM..ARABIC LETTER SEEN MEDIAL FORM ++FEB5..FEB8 ; mapped ; 0634 # 1.1 ARABIC LETTER SHEEN ISOLATED FORM..ARABIC LETTER SHEEN MEDIAL FORM ++FEB9..FEBC ; mapped ; 0635 # 1.1 ARABIC LETTER SAD ISOLATED FORM..ARABIC LETTER SAD MEDIAL FORM ++FEBD..FEC0 ; mapped ; 0636 # 1.1 ARABIC LETTER DAD ISOLATED FORM..ARABIC LETTER DAD MEDIAL FORM ++FEC1..FEC4 ; mapped ; 0637 # 1.1 ARABIC LETTER TAH ISOLATED FORM..ARABIC LETTER TAH MEDIAL FORM ++FEC5..FEC8 ; mapped ; 0638 # 1.1 ARABIC LETTER ZAH ISOLATED FORM..ARABIC LETTER ZAH MEDIAL FORM ++FEC9..FECC ; mapped ; 0639 # 1.1 ARABIC LETTER AIN ISOLATED FORM..ARABIC LETTER AIN MEDIAL FORM ++FECD..FED0 ; mapped ; 063A # 1.1 ARABIC LETTER GHAIN ISOLATED FORM..ARABIC 
LETTER GHAIN MEDIAL FORM ++FED1..FED4 ; mapped ; 0641 # 1.1 ARABIC LETTER FEH ISOLATED FORM..ARABIC LETTER FEH MEDIAL FORM ++FED5..FED8 ; mapped ; 0642 # 1.1 ARABIC LETTER QAF ISOLATED FORM..ARABIC LETTER QAF MEDIAL FORM ++FED9..FEDC ; mapped ; 0643 # 1.1 ARABIC LETTER KAF ISOLATED FORM..ARABIC LETTER KAF MEDIAL FORM ++FEDD..FEE0 ; mapped ; 0644 # 1.1 ARABIC LETTER LAM ISOLATED FORM..ARABIC LETTER LAM MEDIAL FORM ++FEE1..FEE4 ; mapped ; 0645 # 1.1 ARABIC LETTER MEEM ISOLATED FORM..ARABIC LETTER MEEM MEDIAL FORM ++FEE5..FEE8 ; mapped ; 0646 # 1.1 ARABIC LETTER NOON ISOLATED FORM..ARABIC LETTER NOON MEDIAL FORM ++FEE9..FEEC ; mapped ; 0647 # 1.1 ARABIC LETTER HEH ISOLATED FORM..ARABIC LETTER HEH MEDIAL FORM ++FEED..FEEE ; mapped ; 0648 # 1.1 ARABIC LETTER WAW ISOLATED FORM..ARABIC LETTER WAW FINAL FORM ++FEEF..FEF0 ; mapped ; 0649 # 1.1 ARABIC LETTER ALEF MAKSURA ISOLATED FORM..ARABIC LETTER ALEF MAKSURA FINAL FORM ++FEF1..FEF4 ; mapped ; 064A # 1.1 ARABIC LETTER YEH ISOLATED FORM..ARABIC LETTER YEH MEDIAL FORM ++FEF5..FEF6 ; mapped ; 0644 0622 # 1.1 ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE FINAL FORM ++FEF7..FEF8 ; mapped ; 0644 0623 # 1.1 ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE FINAL FORM ++FEF9..FEFA ; mapped ; 0644 0625 # 1.1 ARABIC LIGATURE LAM WITH ALEF WITH HAMZA BELOW ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF WITH HAMZA BELOW FINAL FORM ++FEFB..FEFC ; mapped ; 0644 0627 # 1.1 ARABIC LIGATURE LAM WITH ALEF ISOLATED FORM..ARABIC LIGATURE LAM WITH ALEF FINAL FORM ++FEFD..FEFE ; disallowed # NA .. 
++FEFF ; ignored # 1.1 ZERO WIDTH NO-BREAK SPACE ++FF00 ; disallowed # NA ++FF01 ; disallowed_STD3_mapped ; 0021 # 1.1 FULLWIDTH EXCLAMATION MARK ++FF02 ; disallowed_STD3_mapped ; 0022 # 1.1 FULLWIDTH QUOTATION MARK ++FF03 ; disallowed_STD3_mapped ; 0023 # 1.1 FULLWIDTH NUMBER SIGN ++FF04 ; disallowed_STD3_mapped ; 0024 # 1.1 FULLWIDTH DOLLAR SIGN ++FF05 ; disallowed_STD3_mapped ; 0025 # 1.1 FULLWIDTH PERCENT SIGN ++FF06 ; disallowed_STD3_mapped ; 0026 # 1.1 FULLWIDTH AMPERSAND ++FF07 ; disallowed_STD3_mapped ; 0027 # 1.1 FULLWIDTH APOSTROPHE ++FF08 ; disallowed_STD3_mapped ; 0028 # 1.1 FULLWIDTH LEFT PARENTHESIS ++FF09 ; disallowed_STD3_mapped ; 0029 # 1.1 FULLWIDTH RIGHT PARENTHESIS ++FF0A ; disallowed_STD3_mapped ; 002A # 1.1 FULLWIDTH ASTERISK ++FF0B ; disallowed_STD3_mapped ; 002B # 1.1 FULLWIDTH PLUS SIGN ++FF0C ; disallowed_STD3_mapped ; 002C # 1.1 FULLWIDTH COMMA ++FF0D ; mapped ; 002D # 1.1 FULLWIDTH HYPHEN-MINUS ++FF0E ; mapped ; 002E # 1.1 FULLWIDTH FULL STOP ++FF0F ; disallowed_STD3_mapped ; 002F # 1.1 FULLWIDTH SOLIDUS ++FF10 ; mapped ; 0030 # 1.1 FULLWIDTH DIGIT ZERO ++FF11 ; mapped ; 0031 # 1.1 FULLWIDTH DIGIT ONE ++FF12 ; mapped ; 0032 # 1.1 FULLWIDTH DIGIT TWO ++FF13 ; mapped ; 0033 # 1.1 FULLWIDTH DIGIT THREE ++FF14 ; mapped ; 0034 # 1.1 FULLWIDTH DIGIT FOUR ++FF15 ; mapped ; 0035 # 1.1 FULLWIDTH DIGIT FIVE ++FF16 ; mapped ; 0036 # 1.1 FULLWIDTH DIGIT SIX ++FF17 ; mapped ; 0037 # 1.1 FULLWIDTH DIGIT SEVEN ++FF18 ; mapped ; 0038 # 1.1 FULLWIDTH DIGIT EIGHT ++FF19 ; mapped ; 0039 # 1.1 FULLWIDTH DIGIT NINE ++FF1A ; disallowed_STD3_mapped ; 003A # 1.1 FULLWIDTH COLON ++FF1B ; disallowed_STD3_mapped ; 003B # 1.1 FULLWIDTH SEMICOLON ++FF1C ; disallowed_STD3_mapped ; 003C # 1.1 FULLWIDTH LESS-THAN SIGN ++FF1D ; disallowed_STD3_mapped ; 003D # 1.1 FULLWIDTH EQUALS SIGN ++FF1E ; disallowed_STD3_mapped ; 003E # 1.1 FULLWIDTH GREATER-THAN SIGN ++FF1F ; disallowed_STD3_mapped ; 003F # 1.1 FULLWIDTH QUESTION MARK ++FF20 ; disallowed_STD3_mapped ; 0040 # 1.1 
FULLWIDTH COMMERCIAL AT ++FF21 ; mapped ; 0061 # 1.1 FULLWIDTH LATIN CAPITAL LETTER A ++FF22 ; mapped ; 0062 # 1.1 FULLWIDTH LATIN CAPITAL LETTER B ++FF23 ; mapped ; 0063 # 1.1 FULLWIDTH LATIN CAPITAL LETTER C ++FF24 ; mapped ; 0064 # 1.1 FULLWIDTH LATIN CAPITAL LETTER D ++FF25 ; mapped ; 0065 # 1.1 FULLWIDTH LATIN CAPITAL LETTER E ++FF26 ; mapped ; 0066 # 1.1 FULLWIDTH LATIN CAPITAL LETTER F ++FF27 ; mapped ; 0067 # 1.1 FULLWIDTH LATIN CAPITAL LETTER G ++FF28 ; mapped ; 0068 # 1.1 FULLWIDTH LATIN CAPITAL LETTER H ++FF29 ; mapped ; 0069 # 1.1 FULLWIDTH LATIN CAPITAL LETTER I ++FF2A ; mapped ; 006A # 1.1 FULLWIDTH LATIN CAPITAL LETTER J ++FF2B ; mapped ; 006B # 1.1 FULLWIDTH LATIN CAPITAL LETTER K ++FF2C ; mapped ; 006C # 1.1 FULLWIDTH LATIN CAPITAL LETTER L ++FF2D ; mapped ; 006D # 1.1 FULLWIDTH LATIN CAPITAL LETTER M ++FF2E ; mapped ; 006E # 1.1 FULLWIDTH LATIN CAPITAL LETTER N ++FF2F ; mapped ; 006F # 1.1 FULLWIDTH LATIN CAPITAL LETTER O ++FF30 ; mapped ; 0070 # 1.1 FULLWIDTH LATIN CAPITAL LETTER P ++FF31 ; mapped ; 0071 # 1.1 FULLWIDTH LATIN CAPITAL LETTER Q ++FF32 ; mapped ; 0072 # 1.1 FULLWIDTH LATIN CAPITAL LETTER R ++FF33 ; mapped ; 0073 # 1.1 FULLWIDTH LATIN CAPITAL LETTER S ++FF34 ; mapped ; 0074 # 1.1 FULLWIDTH LATIN CAPITAL LETTER T ++FF35 ; mapped ; 0075 # 1.1 FULLWIDTH LATIN CAPITAL LETTER U ++FF36 ; mapped ; 0076 # 1.1 FULLWIDTH LATIN CAPITAL LETTER V ++FF37 ; mapped ; 0077 # 1.1 FULLWIDTH LATIN CAPITAL LETTER W ++FF38 ; mapped ; 0078 # 1.1 FULLWIDTH LATIN CAPITAL LETTER X ++FF39 ; mapped ; 0079 # 1.1 FULLWIDTH LATIN CAPITAL LETTER Y ++FF3A ; mapped ; 007A # 1.1 FULLWIDTH LATIN CAPITAL LETTER Z ++FF3B ; disallowed_STD3_mapped ; 005B # 1.1 FULLWIDTH LEFT SQUARE BRACKET ++FF3C ; disallowed_STD3_mapped ; 005C # 1.1 FULLWIDTH REVERSE SOLIDUS ++FF3D ; disallowed_STD3_mapped ; 005D # 1.1 FULLWIDTH RIGHT SQUARE BRACKET ++FF3E ; disallowed_STD3_mapped ; 005E # 1.1 FULLWIDTH CIRCUMFLEX ACCENT ++FF3F ; disallowed_STD3_mapped ; 005F # 1.1 FULLWIDTH LOW LINE 
++FF40 ; disallowed_STD3_mapped ; 0060 # 1.1 FULLWIDTH GRAVE ACCENT ++FF41 ; mapped ; 0061 # 1.1 FULLWIDTH LATIN SMALL LETTER A ++FF42 ; mapped ; 0062 # 1.1 FULLWIDTH LATIN SMALL LETTER B ++FF43 ; mapped ; 0063 # 1.1 FULLWIDTH LATIN SMALL LETTER C ++FF44 ; mapped ; 0064 # 1.1 FULLWIDTH LATIN SMALL LETTER D ++FF45 ; mapped ; 0065 # 1.1 FULLWIDTH LATIN SMALL LETTER E ++FF46 ; mapped ; 0066 # 1.1 FULLWIDTH LATIN SMALL LETTER F ++FF47 ; mapped ; 0067 # 1.1 FULLWIDTH LATIN SMALL LETTER G ++FF48 ; mapped ; 0068 # 1.1 FULLWIDTH LATIN SMALL LETTER H ++FF49 ; mapped ; 0069 # 1.1 FULLWIDTH LATIN SMALL LETTER I ++FF4A ; mapped ; 006A # 1.1 FULLWIDTH LATIN SMALL LETTER J ++FF4B ; mapped ; 006B # 1.1 FULLWIDTH LATIN SMALL LETTER K ++FF4C ; mapped ; 006C # 1.1 FULLWIDTH LATIN SMALL LETTER L ++FF4D ; mapped ; 006D # 1.1 FULLWIDTH LATIN SMALL LETTER M ++FF4E ; mapped ; 006E # 1.1 FULLWIDTH LATIN SMALL LETTER N ++FF4F ; mapped ; 006F # 1.1 FULLWIDTH LATIN SMALL LETTER O ++FF50 ; mapped ; 0070 # 1.1 FULLWIDTH LATIN SMALL LETTER P ++FF51 ; mapped ; 0071 # 1.1 FULLWIDTH LATIN SMALL LETTER Q ++FF52 ; mapped ; 0072 # 1.1 FULLWIDTH LATIN SMALL LETTER R ++FF53 ; mapped ; 0073 # 1.1 FULLWIDTH LATIN SMALL LETTER S ++FF54 ; mapped ; 0074 # 1.1 FULLWIDTH LATIN SMALL LETTER T ++FF55 ; mapped ; 0075 # 1.1 FULLWIDTH LATIN SMALL LETTER U ++FF56 ; mapped ; 0076 # 1.1 FULLWIDTH LATIN SMALL LETTER V ++FF57 ; mapped ; 0077 # 1.1 FULLWIDTH LATIN SMALL LETTER W ++FF58 ; mapped ; 0078 # 1.1 FULLWIDTH LATIN SMALL LETTER X ++FF59 ; mapped ; 0079 # 1.1 FULLWIDTH LATIN SMALL LETTER Y ++FF5A ; mapped ; 007A # 1.1 FULLWIDTH LATIN SMALL LETTER Z ++FF5B ; disallowed_STD3_mapped ; 007B # 1.1 FULLWIDTH LEFT CURLY BRACKET ++FF5C ; disallowed_STD3_mapped ; 007C # 1.1 FULLWIDTH VERTICAL LINE ++FF5D ; disallowed_STD3_mapped ; 007D # 1.1 FULLWIDTH RIGHT CURLY BRACKET ++FF5E ; disallowed_STD3_mapped ; 007E # 1.1 FULLWIDTH TILDE ++FF5F ; mapped ; 2985 # 3.2 FULLWIDTH LEFT WHITE PARENTHESIS ++FF60 ; mapped ; 2986 # 3.2 
FULLWIDTH RIGHT WHITE PARENTHESIS ++FF61 ; mapped ; 002E # 1.1 HALFWIDTH IDEOGRAPHIC FULL STOP ++FF62 ; mapped ; 300C # 1.1 HALFWIDTH LEFT CORNER BRACKET ++FF63 ; mapped ; 300D # 1.1 HALFWIDTH RIGHT CORNER BRACKET ++FF64 ; mapped ; 3001 # 1.1 HALFWIDTH IDEOGRAPHIC COMMA ++FF65 ; mapped ; 30FB # 1.1 HALFWIDTH KATAKANA MIDDLE DOT ++FF66 ; mapped ; 30F2 # 1.1 HALFWIDTH KATAKANA LETTER WO ++FF67 ; mapped ; 30A1 # 1.1 HALFWIDTH KATAKANA LETTER SMALL A ++FF68 ; mapped ; 30A3 # 1.1 HALFWIDTH KATAKANA LETTER SMALL I ++FF69 ; mapped ; 30A5 # 1.1 HALFWIDTH KATAKANA LETTER SMALL U ++FF6A ; mapped ; 30A7 # 1.1 HALFWIDTH KATAKANA LETTER SMALL E ++FF6B ; mapped ; 30A9 # 1.1 HALFWIDTH KATAKANA LETTER SMALL O ++FF6C ; mapped ; 30E3 # 1.1 HALFWIDTH KATAKANA LETTER SMALL YA ++FF6D ; mapped ; 30E5 # 1.1 HALFWIDTH KATAKANA LETTER SMALL YU ++FF6E ; mapped ; 30E7 # 1.1 HALFWIDTH KATAKANA LETTER SMALL YO ++FF6F ; mapped ; 30C3 # 1.1 HALFWIDTH KATAKANA LETTER SMALL TU ++FF70 ; mapped ; 30FC # 1.1 HALFWIDTH KATAKANA-HIRAGANA PROLONGED SOUND MARK ++FF71 ; mapped ; 30A2 # 1.1 HALFWIDTH KATAKANA LETTER A ++FF72 ; mapped ; 30A4 # 1.1 HALFWIDTH KATAKANA LETTER I ++FF73 ; mapped ; 30A6 # 1.1 HALFWIDTH KATAKANA LETTER U ++FF74 ; mapped ; 30A8 # 1.1 HALFWIDTH KATAKANA LETTER E ++FF75 ; mapped ; 30AA # 1.1 HALFWIDTH KATAKANA LETTER O ++FF76 ; mapped ; 30AB # 1.1 HALFWIDTH KATAKANA LETTER KA ++FF77 ; mapped ; 30AD # 1.1 HALFWIDTH KATAKANA LETTER KI ++FF78 ; mapped ; 30AF # 1.1 HALFWIDTH KATAKANA LETTER KU ++FF79 ; mapped ; 30B1 # 1.1 HALFWIDTH KATAKANA LETTER KE ++FF7A ; mapped ; 30B3 # 1.1 HALFWIDTH KATAKANA LETTER KO ++FF7B ; mapped ; 30B5 # 1.1 HALFWIDTH KATAKANA LETTER SA ++FF7C ; mapped ; 30B7 # 1.1 HALFWIDTH KATAKANA LETTER SI ++FF7D ; mapped ; 30B9 # 1.1 HALFWIDTH KATAKANA LETTER SU ++FF7E ; mapped ; 30BB # 1.1 HALFWIDTH KATAKANA LETTER SE ++FF7F ; mapped ; 30BD # 1.1 HALFWIDTH KATAKANA LETTER SO ++FF80 ; mapped ; 30BF # 1.1 HALFWIDTH KATAKANA LETTER TA ++FF81 ; mapped ; 30C1 # 1.1 HALFWIDTH 
KATAKANA LETTER TI ++FF82 ; mapped ; 30C4 # 1.1 HALFWIDTH KATAKANA LETTER TU ++FF83 ; mapped ; 30C6 # 1.1 HALFWIDTH KATAKANA LETTER TE ++FF84 ; mapped ; 30C8 # 1.1 HALFWIDTH KATAKANA LETTER TO ++FF85 ; mapped ; 30CA # 1.1 HALFWIDTH KATAKANA LETTER NA ++FF86 ; mapped ; 30CB # 1.1 HALFWIDTH KATAKANA LETTER NI ++FF87 ; mapped ; 30CC # 1.1 HALFWIDTH KATAKANA LETTER NU ++FF88 ; mapped ; 30CD # 1.1 HALFWIDTH KATAKANA LETTER NE ++FF89 ; mapped ; 30CE # 1.1 HALFWIDTH KATAKANA LETTER NO ++FF8A ; mapped ; 30CF # 1.1 HALFWIDTH KATAKANA LETTER HA ++FF8B ; mapped ; 30D2 # 1.1 HALFWIDTH KATAKANA LETTER HI ++FF8C ; mapped ; 30D5 # 1.1 HALFWIDTH KATAKANA LETTER HU ++FF8D ; mapped ; 30D8 # 1.1 HALFWIDTH KATAKANA LETTER HE ++FF8E ; mapped ; 30DB # 1.1 HALFWIDTH KATAKANA LETTER HO ++FF8F ; mapped ; 30DE # 1.1 HALFWIDTH KATAKANA LETTER MA ++FF90 ; mapped ; 30DF # 1.1 HALFWIDTH KATAKANA LETTER MI ++FF91 ; mapped ; 30E0 # 1.1 HALFWIDTH KATAKANA LETTER MU ++FF92 ; mapped ; 30E1 # 1.1 HALFWIDTH KATAKANA LETTER ME ++FF93 ; mapped ; 30E2 # 1.1 HALFWIDTH KATAKANA LETTER MO ++FF94 ; mapped ; 30E4 # 1.1 HALFWIDTH KATAKANA LETTER YA ++FF95 ; mapped ; 30E6 # 1.1 HALFWIDTH KATAKANA LETTER YU ++FF96 ; mapped ; 30E8 # 1.1 HALFWIDTH KATAKANA LETTER YO ++FF97 ; mapped ; 30E9 # 1.1 HALFWIDTH KATAKANA LETTER RA ++FF98 ; mapped ; 30EA # 1.1 HALFWIDTH KATAKANA LETTER RI ++FF99 ; mapped ; 30EB # 1.1 HALFWIDTH KATAKANA LETTER RU ++FF9A ; mapped ; 30EC # 1.1 HALFWIDTH KATAKANA LETTER RE ++FF9B ; mapped ; 30ED # 1.1 HALFWIDTH KATAKANA LETTER RO ++FF9C ; mapped ; 30EF # 1.1 HALFWIDTH KATAKANA LETTER WA ++FF9D ; mapped ; 30F3 # 1.1 HALFWIDTH KATAKANA LETTER N ++FF9E ; mapped ; 3099 # 1.1 HALFWIDTH KATAKANA VOICED SOUND MARK ++FF9F ; mapped ; 309A # 1.1 HALFWIDTH KATAKANA SEMI-VOICED SOUND MARK ++FFA0 ; disallowed # 1.1 HALFWIDTH HANGUL FILLER ++FFA1 ; mapped ; 1100 # 1.1 HALFWIDTH HANGUL LETTER KIYEOK ++FFA2 ; mapped ; 1101 # 1.1 HALFWIDTH HANGUL LETTER SSANGKIYEOK ++FFA3 ; mapped ; 11AA # 1.1 HALFWIDTH HANGUL 
LETTER KIYEOK-SIOS ++FFA4 ; mapped ; 1102 # 1.1 HALFWIDTH HANGUL LETTER NIEUN ++FFA5 ; mapped ; 11AC # 1.1 HALFWIDTH HANGUL LETTER NIEUN-CIEUC ++FFA6 ; mapped ; 11AD # 1.1 HALFWIDTH HANGUL LETTER NIEUN-HIEUH ++FFA7 ; mapped ; 1103 # 1.1 HALFWIDTH HANGUL LETTER TIKEUT ++FFA8 ; mapped ; 1104 # 1.1 HALFWIDTH HANGUL LETTER SSANGTIKEUT ++FFA9 ; mapped ; 1105 # 1.1 HALFWIDTH HANGUL LETTER RIEUL ++FFAA ; mapped ; 11B0 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-KIYEOK ++FFAB ; mapped ; 11B1 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-MIEUM ++FFAC ; mapped ; 11B2 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-PIEUP ++FFAD ; mapped ; 11B3 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-SIOS ++FFAE ; mapped ; 11B4 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-THIEUTH ++FFAF ; mapped ; 11B5 # 1.1 HALFWIDTH HANGUL LETTER RIEUL-PHIEUPH ++FFB0 ; mapped ; 111A # 1.1 HALFWIDTH HANGUL LETTER RIEUL-HIEUH ++FFB1 ; mapped ; 1106 # 1.1 HALFWIDTH HANGUL LETTER MIEUM ++FFB2 ; mapped ; 1107 # 1.1 HALFWIDTH HANGUL LETTER PIEUP ++FFB3 ; mapped ; 1108 # 1.1 HALFWIDTH HANGUL LETTER SSANGPIEUP ++FFB4 ; mapped ; 1121 # 1.1 HALFWIDTH HANGUL LETTER PIEUP-SIOS ++FFB5 ; mapped ; 1109 # 1.1 HALFWIDTH HANGUL LETTER SIOS ++FFB6 ; mapped ; 110A # 1.1 HALFWIDTH HANGUL LETTER SSANGSIOS ++FFB7 ; mapped ; 110B # 1.1 HALFWIDTH HANGUL LETTER IEUNG ++FFB8 ; mapped ; 110C # 1.1 HALFWIDTH HANGUL LETTER CIEUC ++FFB9 ; mapped ; 110D # 1.1 HALFWIDTH HANGUL LETTER SSANGCIEUC ++FFBA ; mapped ; 110E # 1.1 HALFWIDTH HANGUL LETTER CHIEUCH ++FFBB ; mapped ; 110F # 1.1 HALFWIDTH HANGUL LETTER KHIEUKH ++FFBC ; mapped ; 1110 # 1.1 HALFWIDTH HANGUL LETTER THIEUTH ++FFBD ; mapped ; 1111 # 1.1 HALFWIDTH HANGUL LETTER PHIEUPH ++FFBE ; mapped ; 1112 # 1.1 HALFWIDTH HANGUL LETTER HIEUH ++FFBF..FFC1 ; disallowed # NA .. 
++FFC2 ; mapped ; 1161 # 1.1 HALFWIDTH HANGUL LETTER A ++FFC3 ; mapped ; 1162 # 1.1 HALFWIDTH HANGUL LETTER AE ++FFC4 ; mapped ; 1163 # 1.1 HALFWIDTH HANGUL LETTER YA ++FFC5 ; mapped ; 1164 # 1.1 HALFWIDTH HANGUL LETTER YAE ++FFC6 ; mapped ; 1165 # 1.1 HALFWIDTH HANGUL LETTER EO ++FFC7 ; mapped ; 1166 # 1.1 HALFWIDTH HANGUL LETTER E ++FFC8..FFC9 ; disallowed # NA .. ++FFCA ; mapped ; 1167 # 1.1 HALFWIDTH HANGUL LETTER YEO ++FFCB ; mapped ; 1168 # 1.1 HALFWIDTH HANGUL LETTER YE ++FFCC ; mapped ; 1169 # 1.1 HALFWIDTH HANGUL LETTER O ++FFCD ; mapped ; 116A # 1.1 HALFWIDTH HANGUL LETTER WA ++FFCE ; mapped ; 116B # 1.1 HALFWIDTH HANGUL LETTER WAE ++FFCF ; mapped ; 116C # 1.1 HALFWIDTH HANGUL LETTER OE ++FFD0..FFD1 ; disallowed # NA .. ++FFD2 ; mapped ; 116D # 1.1 HALFWIDTH HANGUL LETTER YO ++FFD3 ; mapped ; 116E # 1.1 HALFWIDTH HANGUL LETTER U ++FFD4 ; mapped ; 116F # 1.1 HALFWIDTH HANGUL LETTER WEO ++FFD5 ; mapped ; 1170 # 1.1 HALFWIDTH HANGUL LETTER WE ++FFD6 ; mapped ; 1171 # 1.1 HALFWIDTH HANGUL LETTER WI ++FFD7 ; mapped ; 1172 # 1.1 HALFWIDTH HANGUL LETTER YU ++FFD8..FFD9 ; disallowed # NA .. ++FFDA ; mapped ; 1173 # 1.1 HALFWIDTH HANGUL LETTER EU ++FFDB ; mapped ; 1174 # 1.1 HALFWIDTH HANGUL LETTER YI ++FFDC ; mapped ; 1175 # 1.1 HALFWIDTH HANGUL LETTER I ++FFDD..FFDF ; disallowed # NA .. 
++FFE0 ; mapped ; 00A2 # 1.1 FULLWIDTH CENT SIGN ++FFE1 ; mapped ; 00A3 # 1.1 FULLWIDTH POUND SIGN ++FFE2 ; mapped ; 00AC # 1.1 FULLWIDTH NOT SIGN ++FFE3 ; disallowed_STD3_mapped ; 0020 0304 # 1.1 FULLWIDTH MACRON ++FFE4 ; mapped ; 00A6 # 1.1 FULLWIDTH BROKEN BAR ++FFE5 ; mapped ; 00A5 # 1.1 FULLWIDTH YEN SIGN ++FFE6 ; mapped ; 20A9 # 1.1 FULLWIDTH WON SIGN ++FFE7 ; disallowed # NA ++FFE8 ; mapped ; 2502 # 1.1 HALFWIDTH FORMS LIGHT VERTICAL ++FFE9 ; mapped ; 2190 # 1.1 HALFWIDTH LEFTWARDS ARROW ++FFEA ; mapped ; 2191 # 1.1 HALFWIDTH UPWARDS ARROW ++FFEB ; mapped ; 2192 # 1.1 HALFWIDTH RIGHTWARDS ARROW ++FFEC ; mapped ; 2193 # 1.1 HALFWIDTH DOWNWARDS ARROW ++FFED ; mapped ; 25A0 # 1.1 HALFWIDTH BLACK SQUARE ++FFEE ; mapped ; 25CB # 1.1 HALFWIDTH WHITE CIRCLE ++FFEF..FFF8 ; disallowed # NA .. ++FFF9..FFFB ; disallowed # 3.0 INTERLINEAR ANNOTATION ANCHOR..INTERLINEAR ANNOTATION TERMINATOR ++FFFC ; disallowed # 2.1 OBJECT REPLACEMENT CHARACTER ++FFFD ; disallowed # 1.1 REPLACEMENT CHARACTER ++FFFE..FFFF ; disallowed # 1.1 .. ++10000..1000B ; valid # 4.0 LINEAR B SYLLABLE B008 A..LINEAR B SYLLABLE B046 JE ++1000C ; disallowed # NA ++1000D..10026 ; valid # 4.0 LINEAR B SYLLABLE B036 JO..LINEAR B SYLLABLE B032 QO ++10027 ; disallowed # NA ++10028..1003A ; valid # 4.0 LINEAR B SYLLABLE B060 RA..LINEAR B SYLLABLE B042 WO ++1003B ; disallowed # NA ++1003C..1003D ; valid # 4.0 LINEAR B SYLLABLE B017 ZA..LINEAR B SYLLABLE B074 ZE ++1003E ; disallowed # NA ++1003F..1004D ; valid # 4.0 LINEAR B SYLLABLE B020 ZO..LINEAR B SYLLABLE B091 TWO ++1004E..1004F ; disallowed # NA .. ++10050..1005D ; valid # 4.0 LINEAR B SYMBOL B018..LINEAR B SYMBOL B089 ++1005E..1007F ; disallowed # NA .. ++10080..100FA ; valid # 4.0 LINEAR B IDEOGRAM B100 MAN..LINEAR B IDEOGRAM VESSEL B305 ++100FB..100FF ; disallowed # NA .. ++10100..10102 ; valid ; ; NV8 # 4.0 AEGEAN WORD SEPARATOR LINE..AEGEAN CHECK MARK ++10103..10106 ; disallowed # NA .. 
++10107..10133 ; valid ; ; NV8 # 4.0 AEGEAN NUMBER ONE..AEGEAN NUMBER NINETY THOUSAND ++10134..10136 ; disallowed # NA .. ++10137..1013F ; valid ; ; NV8 # 4.0 AEGEAN WEIGHT BASE UNIT..AEGEAN MEASURE THIRD SUBUNIT ++10140..1018A ; valid ; ; NV8 # 4.1 GREEK ACROPHONIC ATTIC ONE QUARTER..GREEK ZERO SIGN ++1018B..1018C ; valid ; ; NV8 # 7.0 GREEK ONE QUARTER SIGN..GREEK SINUSOID SIGN ++1018D..1018E ; valid ; ; NV8 # 9.0 GREEK INDICTION SIGN..NOMISMA SIGN ++1018F ; disallowed # NA ++10190..1019B ; valid ; ; NV8 # 5.1 ROMAN SEXTANS SIGN..ROMAN CENTURIAL SIGN ++1019C..1019F ; disallowed # NA .. ++101A0 ; valid ; ; NV8 # 7.0 GREEK SYMBOL TAU RHO ++101A1..101CF ; disallowed # NA .. ++101D0..101FC ; valid ; ; NV8 # 5.1 PHAISTOS DISC SIGN PEDESTRIAN..PHAISTOS DISC SIGN WAVY BAND ++101FD ; valid # 5.1 PHAISTOS DISC SIGN COMBINING OBLIQUE STROKE ++101FE..1027F ; disallowed # NA .. ++10280..1029C ; valid # 5.1 LYCIAN LETTER A..LYCIAN LETTER X ++1029D..1029F ; disallowed # NA .. ++102A0..102D0 ; valid # 5.1 CARIAN LETTER A..CARIAN LETTER UUU3 ++102D1..102DF ; disallowed # NA .. ++102E0 ; valid # 7.0 COPTIC EPACT THOUSANDS MARK ++102E1..102FB ; valid ; ; NV8 # 7.0 COPTIC EPACT DIGIT ONE..COPTIC EPACT NUMBER NINE HUNDRED ++102FC..102FF ; disallowed # NA .. ++10300..1031E ; valid # 3.1 OLD ITALIC LETTER A..OLD ITALIC LETTER UU ++1031F ; valid # 7.0 OLD ITALIC LETTER ESS ++10320..10323 ; valid ; ; NV8 # 3.1 OLD ITALIC NUMERAL ONE..OLD ITALIC NUMERAL FIFTY ++10324..1032C ; disallowed # NA .. ++1032D..1032F ; valid # 10.0 OLD ITALIC LETTER YE..OLD ITALIC LETTER SOUTHERN TSE ++10330..10340 ; valid # 3.1 GOTHIC LETTER AHSA..GOTHIC LETTER PAIRTHRA ++10341 ; valid ; ; NV8 # 3.1 GOTHIC LETTER NINETY ++10342..10349 ; valid # 3.1 GOTHIC LETTER RAIDA..GOTHIC LETTER OTHAL ++1034A ; valid ; ; NV8 # 3.1 GOTHIC LETTER NINE HUNDRED ++1034B..1034F ; disallowed # NA .. ++10350..1037A ; valid # 7.0 OLD PERMIC LETTER AN..COMBINING OLD PERMIC LETTER SII ++1037B..1037F ; disallowed # NA .. 
++10380..1039D ; valid # 4.0 UGARITIC LETTER ALPA..UGARITIC LETTER SSU ++1039E ; disallowed # NA ++1039F ; valid ; ; NV8 # 4.0 UGARITIC WORD DIVIDER ++103A0..103C3 ; valid # 4.1 OLD PERSIAN SIGN A..OLD PERSIAN SIGN HA ++103C4..103C7 ; disallowed # NA .. ++103C8..103CF ; valid # 4.1 OLD PERSIAN SIGN AURAMAZDAA..OLD PERSIAN SIGN BUUMISH ++103D0..103D5 ; valid ; ; NV8 # 4.1 OLD PERSIAN WORD DIVIDER..OLD PERSIAN NUMBER HUNDRED ++103D6..103FF ; disallowed # NA .. ++10400 ; mapped ; 10428 # 3.1 DESERET CAPITAL LETTER LONG I ++10401 ; mapped ; 10429 # 3.1 DESERET CAPITAL LETTER LONG E ++10402 ; mapped ; 1042A # 3.1 DESERET CAPITAL LETTER LONG A ++10403 ; mapped ; 1042B # 3.1 DESERET CAPITAL LETTER LONG AH ++10404 ; mapped ; 1042C # 3.1 DESERET CAPITAL LETTER LONG O ++10405 ; mapped ; 1042D # 3.1 DESERET CAPITAL LETTER LONG OO ++10406 ; mapped ; 1042E # 3.1 DESERET CAPITAL LETTER SHORT I ++10407 ; mapped ; 1042F # 3.1 DESERET CAPITAL LETTER SHORT E ++10408 ; mapped ; 10430 # 3.1 DESERET CAPITAL LETTER SHORT A ++10409 ; mapped ; 10431 # 3.1 DESERET CAPITAL LETTER SHORT AH ++1040A ; mapped ; 10432 # 3.1 DESERET CAPITAL LETTER SHORT O ++1040B ; mapped ; 10433 # 3.1 DESERET CAPITAL LETTER SHORT OO ++1040C ; mapped ; 10434 # 3.1 DESERET CAPITAL LETTER AY ++1040D ; mapped ; 10435 # 3.1 DESERET CAPITAL LETTER OW ++1040E ; mapped ; 10436 # 3.1 DESERET CAPITAL LETTER WU ++1040F ; mapped ; 10437 # 3.1 DESERET CAPITAL LETTER YEE ++10410 ; mapped ; 10438 # 3.1 DESERET CAPITAL LETTER H ++10411 ; mapped ; 10439 # 3.1 DESERET CAPITAL LETTER PEE ++10412 ; mapped ; 1043A # 3.1 DESERET CAPITAL LETTER BEE ++10413 ; mapped ; 1043B # 3.1 DESERET CAPITAL LETTER TEE ++10414 ; mapped ; 1043C # 3.1 DESERET CAPITAL LETTER DEE ++10415 ; mapped ; 1043D # 3.1 DESERET CAPITAL LETTER CHEE ++10416 ; mapped ; 1043E # 3.1 DESERET CAPITAL LETTER JEE ++10417 ; mapped ; 1043F # 3.1 DESERET CAPITAL LETTER KAY ++10418 ; mapped ; 10440 # 3.1 DESERET CAPITAL LETTER GAY ++10419 ; mapped ; 10441 # 3.1 DESERET 
CAPITAL LETTER EF ++1041A ; mapped ; 10442 # 3.1 DESERET CAPITAL LETTER VEE ++1041B ; mapped ; 10443 # 3.1 DESERET CAPITAL LETTER ETH ++1041C ; mapped ; 10444 # 3.1 DESERET CAPITAL LETTER THEE ++1041D ; mapped ; 10445 # 3.1 DESERET CAPITAL LETTER ES ++1041E ; mapped ; 10446 # 3.1 DESERET CAPITAL LETTER ZEE ++1041F ; mapped ; 10447 # 3.1 DESERET CAPITAL LETTER ESH ++10420 ; mapped ; 10448 # 3.1 DESERET CAPITAL LETTER ZHEE ++10421 ; mapped ; 10449 # 3.1 DESERET CAPITAL LETTER ER ++10422 ; mapped ; 1044A # 3.1 DESERET CAPITAL LETTER EL ++10423 ; mapped ; 1044B # 3.1 DESERET CAPITAL LETTER EM ++10424 ; mapped ; 1044C # 3.1 DESERET CAPITAL LETTER EN ++10425 ; mapped ; 1044D # 3.1 DESERET CAPITAL LETTER ENG ++10426 ; mapped ; 1044E # 4.0 DESERET CAPITAL LETTER OI ++10427 ; mapped ; 1044F # 4.0 DESERET CAPITAL LETTER EW ++10428..1044D ; valid # 3.1 DESERET SMALL LETTER LONG I..DESERET SMALL LETTER ENG ++1044E..1049D ; valid # 4.0 DESERET SMALL LETTER OI..OSMANYA LETTER OO ++1049E..1049F ; disallowed # NA .. ++104A0..104A9 ; valid # 4.0 OSMANYA DIGIT ZERO..OSMANYA DIGIT NINE ++104AA..104AF ; disallowed # NA .. 
++104B0 ; mapped ; 104D8 # 9.0 OSAGE CAPITAL LETTER A ++104B1 ; mapped ; 104D9 # 9.0 OSAGE CAPITAL LETTER AI ++104B2 ; mapped ; 104DA # 9.0 OSAGE CAPITAL LETTER AIN ++104B3 ; mapped ; 104DB # 9.0 OSAGE CAPITAL LETTER AH ++104B4 ; mapped ; 104DC # 9.0 OSAGE CAPITAL LETTER BRA ++104B5 ; mapped ; 104DD # 9.0 OSAGE CAPITAL LETTER CHA ++104B6 ; mapped ; 104DE # 9.0 OSAGE CAPITAL LETTER EHCHA ++104B7 ; mapped ; 104DF # 9.0 OSAGE CAPITAL LETTER E ++104B8 ; mapped ; 104E0 # 9.0 OSAGE CAPITAL LETTER EIN ++104B9 ; mapped ; 104E1 # 9.0 OSAGE CAPITAL LETTER HA ++104BA ; mapped ; 104E2 # 9.0 OSAGE CAPITAL LETTER HYA ++104BB ; mapped ; 104E3 # 9.0 OSAGE CAPITAL LETTER I ++104BC ; mapped ; 104E4 # 9.0 OSAGE CAPITAL LETTER KA ++104BD ; mapped ; 104E5 # 9.0 OSAGE CAPITAL LETTER EHKA ++104BE ; mapped ; 104E6 # 9.0 OSAGE CAPITAL LETTER KYA ++104BF ; mapped ; 104E7 # 9.0 OSAGE CAPITAL LETTER LA ++104C0 ; mapped ; 104E8 # 9.0 OSAGE CAPITAL LETTER MA ++104C1 ; mapped ; 104E9 # 9.0 OSAGE CAPITAL LETTER NA ++104C2 ; mapped ; 104EA # 9.0 OSAGE CAPITAL LETTER O ++104C3 ; mapped ; 104EB # 9.0 OSAGE CAPITAL LETTER OIN ++104C4 ; mapped ; 104EC # 9.0 OSAGE CAPITAL LETTER PA ++104C5 ; mapped ; 104ED # 9.0 OSAGE CAPITAL LETTER EHPA ++104C6 ; mapped ; 104EE # 9.0 OSAGE CAPITAL LETTER SA ++104C7 ; mapped ; 104EF # 9.0 OSAGE CAPITAL LETTER SHA ++104C8 ; mapped ; 104F0 # 9.0 OSAGE CAPITAL LETTER TA ++104C9 ; mapped ; 104F1 # 9.0 OSAGE CAPITAL LETTER EHTA ++104CA ; mapped ; 104F2 # 9.0 OSAGE CAPITAL LETTER TSA ++104CB ; mapped ; 104F3 # 9.0 OSAGE CAPITAL LETTER EHTSA ++104CC ; mapped ; 104F4 # 9.0 OSAGE CAPITAL LETTER TSHA ++104CD ; mapped ; 104F5 # 9.0 OSAGE CAPITAL LETTER DHA ++104CE ; mapped ; 104F6 # 9.0 OSAGE CAPITAL LETTER U ++104CF ; mapped ; 104F7 # 9.0 OSAGE CAPITAL LETTER WA ++104D0 ; mapped ; 104F8 # 9.0 OSAGE CAPITAL LETTER KHA ++104D1 ; mapped ; 104F9 # 9.0 OSAGE CAPITAL LETTER GHA ++104D2 ; mapped ; 104FA # 9.0 OSAGE CAPITAL LETTER ZA ++104D3 ; mapped ; 104FB # 9.0 OSAGE CAPITAL LETTER 
ZHA ++104D4..104D7 ; disallowed # NA .. ++104D8..104FB ; valid # 9.0 OSAGE SMALL LETTER A..OSAGE SMALL LETTER ZHA ++104FC..104FF ; disallowed # NA .. ++10500..10527 ; valid # 7.0 ELBASAN LETTER A..ELBASAN LETTER KHE ++10528..1052F ; disallowed # NA .. ++10530..10563 ; valid # 7.0 CAUCASIAN ALBANIAN LETTER ALT..CAUCASIAN ALBANIAN LETTER KIW ++10564..1056E ; disallowed # NA .. ++1056F ; valid ; ; NV8 # 7.0 CAUCASIAN ALBANIAN CITATION MARK ++10570..105FF ; disallowed # NA .. ++10600..10736 ; valid # 7.0 LINEAR A SIGN AB001..LINEAR A SIGN A664 ++10737..1073F ; disallowed # NA .. ++10740..10755 ; valid # 7.0 LINEAR A SIGN A701 A..LINEAR A SIGN A732 JE ++10756..1075F ; disallowed # NA .. ++10760..10767 ; valid # 7.0 LINEAR A SIGN A800..LINEAR A SIGN A807 ++10768..107FF ; disallowed # NA .. ++10800..10805 ; valid # 4.0 CYPRIOT SYLLABLE A..CYPRIOT SYLLABLE JA ++10806..10807 ; disallowed # NA .. ++10808 ; valid # 4.0 CYPRIOT SYLLABLE JO ++10809 ; disallowed # NA ++1080A..10835 ; valid # 4.0 CYPRIOT SYLLABLE KA..CYPRIOT SYLLABLE WO ++10836 ; disallowed # NA ++10837..10838 ; valid # 4.0 CYPRIOT SYLLABLE XA..CYPRIOT SYLLABLE XE ++10839..1083B ; disallowed # NA .. ++1083C ; valid # 4.0 CYPRIOT SYLLABLE ZA ++1083D..1083E ; disallowed # NA .. ++1083F ; valid # 4.0 CYPRIOT SYLLABLE ZO ++10840..10855 ; valid # 5.2 IMPERIAL ARAMAIC LETTER ALEPH..IMPERIAL ARAMAIC LETTER TAW ++10856 ; disallowed # NA ++10857..1085F ; valid ; ; NV8 # 5.2 IMPERIAL ARAMAIC SECTION SIGN..IMPERIAL ARAMAIC NUMBER TEN THOUSAND ++10860..10876 ; valid # 7.0 PALMYRENE LETTER ALEPH..PALMYRENE LETTER TAW ++10877..1087F ; valid ; ; NV8 # 7.0 PALMYRENE LEFT-POINTING FLEURON..PALMYRENE NUMBER TWENTY ++10880..1089E ; valid # 7.0 NABATAEAN LETTER FINAL ALEPH..NABATAEAN LETTER TAW ++1089F..108A6 ; disallowed # NA .. ++108A7..108AF ; valid ; ; NV8 # 7.0 NABATAEAN NUMBER ONE..NABATAEAN NUMBER ONE HUNDRED ++108B0..108DF ; disallowed # NA .. 
++108E0..108F2 ; valid # 8.0 HATRAN LETTER ALEPH..HATRAN LETTER QOPH ++108F3 ; disallowed # NA ++108F4..108F5 ; valid # 8.0 HATRAN LETTER SHIN..HATRAN LETTER TAW ++108F6..108FA ; disallowed # NA .. ++108FB..108FF ; valid ; ; NV8 # 8.0 HATRAN NUMBER ONE..HATRAN NUMBER ONE HUNDRED ++10900..10915 ; valid # 5.0 PHOENICIAN LETTER ALF..PHOENICIAN LETTER TAU ++10916..10919 ; valid ; ; NV8 # 5.0 PHOENICIAN NUMBER ONE..PHOENICIAN NUMBER ONE HUNDRED ++1091A..1091B ; valid ; ; NV8 # 5.2 PHOENICIAN NUMBER TWO..PHOENICIAN NUMBER THREE ++1091C..1091E ; disallowed # NA .. ++1091F ; valid ; ; NV8 # 5.0 PHOENICIAN WORD SEPARATOR ++10920..10939 ; valid # 5.1 LYDIAN LETTER A..LYDIAN LETTER C ++1093A..1093E ; disallowed # NA .. ++1093F ; valid ; ; NV8 # 5.1 LYDIAN TRIANGULAR MARK ++10940..1097F ; disallowed # NA .. ++10980..109B7 ; valid # 6.1 MEROITIC HIEROGLYPHIC LETTER A..MEROITIC CURSIVE LETTER DA ++109B8..109BB ; disallowed # NA .. ++109BC..109BD ; valid ; ; NV8 # 8.0 MEROITIC CURSIVE FRACTION ELEVEN TWELFTHS..MEROITIC CURSIVE FRACTION ONE HALF ++109BE..109BF ; valid # 6.1 MEROITIC CURSIVE LOGOGRAM RMT..MEROITIC CURSIVE LOGOGRAM IMN ++109C0..109CF ; valid ; ; NV8 # 8.0 MEROITIC CURSIVE NUMBER ONE..MEROITIC CURSIVE NUMBER SEVENTY ++109D0..109D1 ; disallowed # NA .. ++109D2..109FF ; valid ; ; NV8 # 8.0 MEROITIC CURSIVE NUMBER ONE HUNDRED..MEROITIC CURSIVE FRACTION TEN TWELFTHS ++10A00..10A03 ; valid # 4.1 KHAROSHTHI LETTER A..KHAROSHTHI VOWEL SIGN VOCALIC R ++10A04 ; disallowed # NA ++10A05..10A06 ; valid # 4.1 KHAROSHTHI VOWEL SIGN E..KHAROSHTHI VOWEL SIGN O ++10A07..10A0B ; disallowed # NA .. ++10A0C..10A13 ; valid # 4.1 KHAROSHTHI VOWEL LENGTH MARK..KHAROSHTHI LETTER GHA ++10A14 ; disallowed # NA ++10A15..10A17 ; valid # 4.1 KHAROSHTHI LETTER CA..KHAROSHTHI LETTER JA ++10A18 ; disallowed # NA ++10A19..10A33 ; valid # 4.1 KHAROSHTHI LETTER NYA..KHAROSHTHI LETTER TTTHA ++10A34..10A37 ; disallowed # NA .. 
++10A38..10A3A ; valid # 4.1 KHAROSHTHI SIGN BAR ABOVE..KHAROSHTHI SIGN DOT BELOW ++10A3B..10A3E ; disallowed # NA .. ++10A3F ; valid # 4.1 KHAROSHTHI VIRAMA ++10A40..10A47 ; valid ; ; NV8 # 4.1 KHAROSHTHI DIGIT ONE..KHAROSHTHI NUMBER ONE THOUSAND ++10A48..10A4F ; disallowed # NA .. ++10A50..10A58 ; valid ; ; NV8 # 4.1 KHAROSHTHI PUNCTUATION DOT..KHAROSHTHI PUNCTUATION LINES ++10A59..10A5F ; disallowed # NA .. ++10A60..10A7C ; valid # 5.2 OLD SOUTH ARABIAN LETTER HE..OLD SOUTH ARABIAN LETTER THETH ++10A7D..10A7F ; valid ; ; NV8 # 5.2 OLD SOUTH ARABIAN NUMBER ONE..OLD SOUTH ARABIAN NUMERIC INDICATOR ++10A80..10A9C ; valid # 7.0 OLD NORTH ARABIAN LETTER HEH..OLD NORTH ARABIAN LETTER ZAH ++10A9D..10A9F ; valid ; ; NV8 # 7.0 OLD NORTH ARABIAN NUMBER ONE..OLD NORTH ARABIAN NUMBER TWENTY ++10AA0..10ABF ; disallowed # NA .. ++10AC0..10AC7 ; valid # 7.0 MANICHAEAN LETTER ALEPH..MANICHAEAN LETTER WAW ++10AC8 ; valid ; ; NV8 # 7.0 MANICHAEAN SIGN UD ++10AC9..10AE6 ; valid # 7.0 MANICHAEAN LETTER ZAYIN..MANICHAEAN ABBREVIATION MARK BELOW ++10AE7..10AEA ; disallowed # NA .. ++10AEB..10AF6 ; valid ; ; NV8 # 7.0 MANICHAEAN NUMBER ONE..MANICHAEAN PUNCTUATION LINE FILLER ++10AF7..10AFF ; disallowed # NA .. ++10B00..10B35 ; valid # 5.2 AVESTAN LETTER A..AVESTAN LETTER HE ++10B36..10B38 ; disallowed # NA .. ++10B39..10B3F ; valid ; ; NV8 # 5.2 AVESTAN ABBREVIATION MARK..LARGE ONE RING OVER TWO RINGS PUNCTUATION ++10B40..10B55 ; valid # 5.2 INSCRIPTIONAL PARTHIAN LETTER ALEPH..INSCRIPTIONAL PARTHIAN LETTER TAW ++10B56..10B57 ; disallowed # NA .. ++10B58..10B5F ; valid ; ; NV8 # 5.2 INSCRIPTIONAL PARTHIAN NUMBER ONE..INSCRIPTIONAL PARTHIAN NUMBER ONE THOUSAND ++10B60..10B72 ; valid # 5.2 INSCRIPTIONAL PAHLAVI LETTER ALEPH..INSCRIPTIONAL PAHLAVI LETTER TAW ++10B73..10B77 ; disallowed # NA .. 
++10B78..10B7F ; valid ; ; NV8 # 5.2 INSCRIPTIONAL PAHLAVI NUMBER ONE..INSCRIPTIONAL PAHLAVI NUMBER ONE THOUSAND ++10B80..10B91 ; valid # 7.0 PSALTER PAHLAVI LETTER ALEPH..PSALTER PAHLAVI LETTER TAW ++10B92..10B98 ; disallowed # NA .. ++10B99..10B9C ; valid ; ; NV8 # 7.0 PSALTER PAHLAVI SECTION MARK..PSALTER PAHLAVI FOUR DOTS WITH DOT ++10B9D..10BA8 ; disallowed # NA .. ++10BA9..10BAF ; valid ; ; NV8 # 7.0 PSALTER PAHLAVI NUMBER ONE..PSALTER PAHLAVI NUMBER ONE HUNDRED ++10BB0..10BFF ; disallowed # NA .. ++10C00..10C48 ; valid # 5.2 OLD TURKIC LETTER ORKHON A..OLD TURKIC LETTER ORKHON BASH ++10C49..10C7F ; disallowed # NA .. ++10C80 ; mapped ; 10CC0 # 8.0 OLD HUNGARIAN CAPITAL LETTER A ++10C81 ; mapped ; 10CC1 # 8.0 OLD HUNGARIAN CAPITAL LETTER AA ++10C82 ; mapped ; 10CC2 # 8.0 OLD HUNGARIAN CAPITAL LETTER EB ++10C83 ; mapped ; 10CC3 # 8.0 OLD HUNGARIAN CAPITAL LETTER AMB ++10C84 ; mapped ; 10CC4 # 8.0 OLD HUNGARIAN CAPITAL LETTER EC ++10C85 ; mapped ; 10CC5 # 8.0 OLD HUNGARIAN CAPITAL LETTER ENC ++10C86 ; mapped ; 10CC6 # 8.0 OLD HUNGARIAN CAPITAL LETTER ECS ++10C87 ; mapped ; 10CC7 # 8.0 OLD HUNGARIAN CAPITAL LETTER ED ++10C88 ; mapped ; 10CC8 # 8.0 OLD HUNGARIAN CAPITAL LETTER AND ++10C89 ; mapped ; 10CC9 # 8.0 OLD HUNGARIAN CAPITAL LETTER E ++10C8A ; mapped ; 10CCA # 8.0 OLD HUNGARIAN CAPITAL LETTER CLOSE E ++10C8B ; mapped ; 10CCB # 8.0 OLD HUNGARIAN CAPITAL LETTER EE ++10C8C ; mapped ; 10CCC # 8.0 OLD HUNGARIAN CAPITAL LETTER EF ++10C8D ; mapped ; 10CCD # 8.0 OLD HUNGARIAN CAPITAL LETTER EG ++10C8E ; mapped ; 10CCE # 8.0 OLD HUNGARIAN CAPITAL LETTER EGY ++10C8F ; mapped ; 10CCF # 8.0 OLD HUNGARIAN CAPITAL LETTER EH ++10C90 ; mapped ; 10CD0 # 8.0 OLD HUNGARIAN CAPITAL LETTER I ++10C91 ; mapped ; 10CD1 # 8.0 OLD HUNGARIAN CAPITAL LETTER II ++10C92 ; mapped ; 10CD2 # 8.0 OLD HUNGARIAN CAPITAL LETTER EJ ++10C93 ; mapped ; 10CD3 # 8.0 OLD HUNGARIAN CAPITAL LETTER EK ++10C94 ; mapped ; 10CD4 # 8.0 OLD HUNGARIAN CAPITAL LETTER AK ++10C95 ; mapped ; 10CD5 # 8.0 OLD 
HUNGARIAN CAPITAL LETTER UNK ++10C96 ; mapped ; 10CD6 # 8.0 OLD HUNGARIAN CAPITAL LETTER EL ++10C97 ; mapped ; 10CD7 # 8.0 OLD HUNGARIAN CAPITAL LETTER ELY ++10C98 ; mapped ; 10CD8 # 8.0 OLD HUNGARIAN CAPITAL LETTER EM ++10C99 ; mapped ; 10CD9 # 8.0 OLD HUNGARIAN CAPITAL LETTER EN ++10C9A ; mapped ; 10CDA # 8.0 OLD HUNGARIAN CAPITAL LETTER ENY ++10C9B ; mapped ; 10CDB # 8.0 OLD HUNGARIAN CAPITAL LETTER O ++10C9C ; mapped ; 10CDC # 8.0 OLD HUNGARIAN CAPITAL LETTER OO ++10C9D ; mapped ; 10CDD # 8.0 OLD HUNGARIAN CAPITAL LETTER NIKOLSBURG OE ++10C9E ; mapped ; 10CDE # 8.0 OLD HUNGARIAN CAPITAL LETTER RUDIMENTA OE ++10C9F ; mapped ; 10CDF # 8.0 OLD HUNGARIAN CAPITAL LETTER OEE ++10CA0 ; mapped ; 10CE0 # 8.0 OLD HUNGARIAN CAPITAL LETTER EP ++10CA1 ; mapped ; 10CE1 # 8.0 OLD HUNGARIAN CAPITAL LETTER EMP ++10CA2 ; mapped ; 10CE2 # 8.0 OLD HUNGARIAN CAPITAL LETTER ER ++10CA3 ; mapped ; 10CE3 # 8.0 OLD HUNGARIAN CAPITAL LETTER SHORT ER ++10CA4 ; mapped ; 10CE4 # 8.0 OLD HUNGARIAN CAPITAL LETTER ES ++10CA5 ; mapped ; 10CE5 # 8.0 OLD HUNGARIAN CAPITAL LETTER ESZ ++10CA6 ; mapped ; 10CE6 # 8.0 OLD HUNGARIAN CAPITAL LETTER ET ++10CA7 ; mapped ; 10CE7 # 8.0 OLD HUNGARIAN CAPITAL LETTER ENT ++10CA8 ; mapped ; 10CE8 # 8.0 OLD HUNGARIAN CAPITAL LETTER ETY ++10CA9 ; mapped ; 10CE9 # 8.0 OLD HUNGARIAN CAPITAL LETTER ECH ++10CAA ; mapped ; 10CEA # 8.0 OLD HUNGARIAN CAPITAL LETTER U ++10CAB ; mapped ; 10CEB # 8.0 OLD HUNGARIAN CAPITAL LETTER UU ++10CAC ; mapped ; 10CEC # 8.0 OLD HUNGARIAN CAPITAL LETTER NIKOLSBURG UE ++10CAD ; mapped ; 10CED # 8.0 OLD HUNGARIAN CAPITAL LETTER RUDIMENTA UE ++10CAE ; mapped ; 10CEE # 8.0 OLD HUNGARIAN CAPITAL LETTER EV ++10CAF ; mapped ; 10CEF # 8.0 OLD HUNGARIAN CAPITAL LETTER EZ ++10CB0 ; mapped ; 10CF0 # 8.0 OLD HUNGARIAN CAPITAL LETTER EZS ++10CB1 ; mapped ; 10CF1 # 8.0 OLD HUNGARIAN CAPITAL LETTER ENT-SHAPED SIGN ++10CB2 ; mapped ; 10CF2 # 8.0 OLD HUNGARIAN CAPITAL LETTER US ++10CB3..10CBF ; disallowed # NA .. 
++10CC0..10CF2 ; valid # 8.0 OLD HUNGARIAN SMALL LETTER A..OLD HUNGARIAN SMALL LETTER US ++10CF3..10CF9 ; disallowed # NA .. ++10CFA..10CFF ; valid ; ; NV8 # 8.0 OLD HUNGARIAN NUMBER ONE..OLD HUNGARIAN NUMBER ONE THOUSAND ++10D00..10E5F ; disallowed # NA .. ++10E60..10E7E ; valid ; ; NV8 # 5.2 RUMI DIGIT ONE..RUMI FRACTION TWO THIRDS ++10E7F..10FFF ; disallowed # NA .. ++11000..11046 ; valid # 6.0 BRAHMI SIGN CANDRABINDU..BRAHMI VIRAMA ++11047..1104D ; valid ; ; NV8 # 6.0 BRAHMI DANDA..BRAHMI PUNCTUATION LOTUS ++1104E..11051 ; disallowed # NA .. ++11052..11065 ; valid ; ; NV8 # 6.0 BRAHMI NUMBER ONE..BRAHMI NUMBER ONE THOUSAND ++11066..1106F ; valid # 6.0 BRAHMI DIGIT ZERO..BRAHMI DIGIT NINE ++11070..1107E ; disallowed # NA .. ++1107F ; valid # 7.0 BRAHMI NUMBER JOINER ++11080..110BA ; valid # 5.2 KAITHI SIGN CANDRABINDU..KAITHI SIGN NUKTA ++110BB..110BC ; valid ; ; NV8 # 5.2 KAITHI ABBREVIATION SIGN..KAITHI ENUMERATION SIGN ++110BD ; disallowed # 5.2 KAITHI NUMBER SIGN ++110BE..110C1 ; valid ; ; NV8 # 5.2 KAITHI SECTION MARK..KAITHI DOUBLE DANDA ++110C2..110CF ; disallowed # NA .. ++110D0..110E8 ; valid # 6.1 SORA SOMPENG LETTER SAH..SORA SOMPENG LETTER MAE ++110E9..110EF ; disallowed # NA .. ++110F0..110F9 ; valid # 6.1 SORA SOMPENG DIGIT ZERO..SORA SOMPENG DIGIT NINE ++110FA..110FF ; disallowed # NA .. ++11100..11134 ; valid # 6.1 CHAKMA SIGN CANDRABINDU..CHAKMA MAAYYAA ++11135 ; disallowed # NA ++11136..1113F ; valid # 6.1 CHAKMA DIGIT ZERO..CHAKMA DIGIT NINE ++11140..11143 ; valid ; ; NV8 # 6.1 CHAKMA SECTION MARK..CHAKMA QUESTION MARK ++11144..1114F ; disallowed # NA .. ++11150..11173 ; valid # 7.0 MAHAJANI LETTER A..MAHAJANI SIGN NUKTA ++11174..11175 ; valid ; ; NV8 # 7.0 MAHAJANI ABBREVIATION SIGN..MAHAJANI SECTION MARK ++11176 ; valid # 7.0 MAHAJANI LIGATURE SHRI ++11177..1117F ; disallowed # NA .. 
++11180..111C4 ; valid # 6.1 SHARADA SIGN CANDRABINDU..SHARADA OM ++111C5..111C8 ; valid ; ; NV8 # 6.1 SHARADA DANDA..SHARADA SEPARATOR ++111C9 ; valid ; ; NV8 # 8.0 SHARADA SANDHI MARK ++111CA..111CC ; valid # 8.0 SHARADA SIGN NUKTA..SHARADA EXTRA SHORT VOWEL MARK ++111CD ; valid ; ; NV8 # 7.0 SHARADA SUTRA MARK ++111CE..111CF ; disallowed # NA .. ++111D0..111D9 ; valid # 6.1 SHARADA DIGIT ZERO..SHARADA DIGIT NINE ++111DA ; valid # 7.0 SHARADA EKAM ++111DB ; valid ; ; NV8 # 8.0 SHARADA SIGN SIDDHAM ++111DC ; valid # 8.0 SHARADA HEADSTROKE ++111DD..111DF ; valid ; ; NV8 # 8.0 SHARADA CONTINUATION SIGN..SHARADA SECTION MARK-2 ++111E0 ; disallowed # NA ++111E1..111F4 ; valid ; ; NV8 # 7.0 SINHALA ARCHAIC DIGIT ONE..SINHALA ARCHAIC NUMBER ONE THOUSAND ++111F5..111FF ; disallowed # NA .. ++11200..11211 ; valid # 7.0 KHOJKI LETTER A..KHOJKI LETTER JJA ++11212 ; disallowed # NA ++11213..11237 ; valid # 7.0 KHOJKI LETTER NYA..KHOJKI SIGN SHADDA ++11238..1123D ; valid ; ; NV8 # 7.0 KHOJKI DANDA..KHOJKI ABBREVIATION SIGN ++1123E ; valid # 9.0 KHOJKI SIGN SUKUN ++1123F..1127F ; disallowed # NA .. ++11280..11286 ; valid # 8.0 MULTANI LETTER A..MULTANI LETTER GA ++11287 ; disallowed # NA ++11288 ; valid # 8.0 MULTANI LETTER GHA ++11289 ; disallowed # NA ++1128A..1128D ; valid # 8.0 MULTANI LETTER CA..MULTANI LETTER JJA ++1128E ; disallowed # NA ++1128F..1129D ; valid # 8.0 MULTANI LETTER NYA..MULTANI LETTER BA ++1129E ; disallowed # NA ++1129F..112A8 ; valid # 8.0 MULTANI LETTER BHA..MULTANI LETTER RHA ++112A9 ; valid ; ; NV8 # 8.0 MULTANI SECTION MARK ++112AA..112AF ; disallowed # NA .. ++112B0..112EA ; valid # 7.0 KHUDAWADI LETTER A..KHUDAWADI SIGN VIRAMA ++112EB..112EF ; disallowed # NA .. ++112F0..112F9 ; valid # 7.0 KHUDAWADI DIGIT ZERO..KHUDAWADI DIGIT NINE ++112FA..112FF ; disallowed # NA .. 
++11300 ; valid # 8.0 GRANTHA SIGN COMBINING ANUSVARA ABOVE ++11301..11303 ; valid # 7.0 GRANTHA SIGN CANDRABINDU..GRANTHA SIGN VISARGA ++11304 ; disallowed # NA ++11305..1130C ; valid # 7.0 GRANTHA LETTER A..GRANTHA LETTER VOCALIC L ++1130D..1130E ; disallowed # NA .. ++1130F..11310 ; valid # 7.0 GRANTHA LETTER EE..GRANTHA LETTER AI ++11311..11312 ; disallowed # NA .. ++11313..11328 ; valid # 7.0 GRANTHA LETTER OO..GRANTHA LETTER NA ++11329 ; disallowed # NA ++1132A..11330 ; valid # 7.0 GRANTHA LETTER PA..GRANTHA LETTER RA ++11331 ; disallowed # NA ++11332..11333 ; valid # 7.0 GRANTHA LETTER LA..GRANTHA LETTER LLA ++11334 ; disallowed # NA ++11335..11339 ; valid # 7.0 GRANTHA LETTER VA..GRANTHA LETTER HA ++1133A..1133B ; disallowed # NA .. ++1133C..11344 ; valid # 7.0 GRANTHA SIGN NUKTA..GRANTHA VOWEL SIGN VOCALIC RR ++11345..11346 ; disallowed # NA .. ++11347..11348 ; valid # 7.0 GRANTHA VOWEL SIGN EE..GRANTHA VOWEL SIGN AI ++11349..1134A ; disallowed # NA .. ++1134B..1134D ; valid # 7.0 GRANTHA VOWEL SIGN OO..GRANTHA SIGN VIRAMA ++1134E..1134F ; disallowed # NA .. ++11350 ; valid # 8.0 GRANTHA OM ++11351..11356 ; disallowed # NA .. ++11357 ; valid # 7.0 GRANTHA AU LENGTH MARK ++11358..1135C ; disallowed # NA .. ++1135D..11363 ; valid # 7.0 GRANTHA SIGN PLUTA..GRANTHA VOWEL SIGN VOCALIC LL ++11364..11365 ; disallowed # NA .. ++11366..1136C ; valid # 7.0 COMBINING GRANTHA DIGIT ZERO..COMBINING GRANTHA DIGIT SIX ++1136D..1136F ; disallowed # NA .. ++11370..11374 ; valid # 7.0 COMBINING GRANTHA LETTER A..COMBINING GRANTHA LETTER PA ++11375..113FF ; disallowed # NA .. ++11400..1144A ; valid # 9.0 NEWA LETTER A..NEWA SIDDHI ++1144B..1144F ; valid ; ; NV8 # 9.0 NEWA DANDA..NEWA ABBREVIATION SIGN ++11450..11459 ; valid # 9.0 NEWA DIGIT ZERO..NEWA DIGIT NINE ++1145A ; disallowed # NA ++1145B ; valid ; ; NV8 # 9.0 NEWA PLACEHOLDER MARK ++1145C ; disallowed # NA ++1145D ; valid ; ; NV8 # 9.0 NEWA INSERTION SIGN ++1145E..1147F ; disallowed # NA .. 
++11480..114C5 ; valid # 7.0 TIRHUTA ANJI..TIRHUTA GVANG ++114C6 ; valid ; ; NV8 # 7.0 TIRHUTA ABBREVIATION SIGN ++114C7 ; valid # 7.0 TIRHUTA OM ++114C8..114CF ; disallowed # NA .. ++114D0..114D9 ; valid # 7.0 TIRHUTA DIGIT ZERO..TIRHUTA DIGIT NINE ++114DA..1157F ; disallowed # NA .. ++11580..115B5 ; valid # 7.0 SIDDHAM LETTER A..SIDDHAM VOWEL SIGN VOCALIC RR ++115B6..115B7 ; disallowed # NA .. ++115B8..115C0 ; valid # 7.0 SIDDHAM VOWEL SIGN E..SIDDHAM SIGN NUKTA ++115C1..115C9 ; valid ; ; NV8 # 7.0 SIDDHAM SIGN SIDDHAM..SIDDHAM END OF TEXT MARK ++115CA..115D7 ; valid ; ; NV8 # 8.0 SIDDHAM SECTION MARK WITH TRIDENT AND U-SHAPED ORNAMENTS..SIDDHAM SECTION MARK WITH CIRCLES AND FOUR ENCLOSURES ++115D8..115DD ; valid # 8.0 SIDDHAM LETTER THREE-CIRCLE ALTERNATE I..SIDDHAM VOWEL SIGN ALTERNATE UU ++115DE..115FF ; disallowed # NA .. ++11600..11640 ; valid # 7.0 MODI LETTER A..MODI SIGN ARDHACANDRA ++11641..11643 ; valid ; ; NV8 # 7.0 MODI DANDA..MODI ABBREVIATION SIGN ++11644 ; valid # 7.0 MODI SIGN HUVA ++11645..1164F ; disallowed # NA .. ++11650..11659 ; valid # 7.0 MODI DIGIT ZERO..MODI DIGIT NINE ++1165A..1165F ; disallowed # NA .. ++11660..1166C ; valid ; ; NV8 # 9.0 MONGOLIAN BIRGA WITH ORNAMENT..MONGOLIAN TURNED SWIRL BIRGA WITH DOUBLE ORNAMENT ++1166D..1167F ; disallowed # NA .. ++11680..116B7 ; valid # 6.1 TAKRI LETTER A..TAKRI SIGN NUKTA ++116B8..116BF ; disallowed # NA .. ++116C0..116C9 ; valid # 6.1 TAKRI DIGIT ZERO..TAKRI DIGIT NINE ++116CA..116FF ; disallowed # NA .. ++11700..11719 ; valid # 8.0 AHOM LETTER KA..AHOM LETTER JHA ++1171A..1171C ; disallowed # NA .. ++1171D..1172B ; valid # 8.0 AHOM CONSONANT SIGN MEDIAL LA..AHOM SIGN KILLER ++1172C..1172F ; disallowed # NA .. ++11730..11739 ; valid # 8.0 AHOM DIGIT ZERO..AHOM DIGIT NINE ++1173A..1173F ; valid ; ; NV8 # 8.0 AHOM NUMBER TEN..AHOM SYMBOL VI ++11740..1189F ; disallowed # NA .. 
++118A0 ; mapped ; 118C0 # 7.0 WARANG CITI CAPITAL LETTER NGAA ++118A1 ; mapped ; 118C1 # 7.0 WARANG CITI CAPITAL LETTER A ++118A2 ; mapped ; 118C2 # 7.0 WARANG CITI CAPITAL LETTER WI ++118A3 ; mapped ; 118C3 # 7.0 WARANG CITI CAPITAL LETTER YU ++118A4 ; mapped ; 118C4 # 7.0 WARANG CITI CAPITAL LETTER YA ++118A5 ; mapped ; 118C5 # 7.0 WARANG CITI CAPITAL LETTER YO ++118A6 ; mapped ; 118C6 # 7.0 WARANG CITI CAPITAL LETTER II ++118A7 ; mapped ; 118C7 # 7.0 WARANG CITI CAPITAL LETTER UU ++118A8 ; mapped ; 118C8 # 7.0 WARANG CITI CAPITAL LETTER E ++118A9 ; mapped ; 118C9 # 7.0 WARANG CITI CAPITAL LETTER O ++118AA ; mapped ; 118CA # 7.0 WARANG CITI CAPITAL LETTER ANG ++118AB ; mapped ; 118CB # 7.0 WARANG CITI CAPITAL LETTER GA ++118AC ; mapped ; 118CC # 7.0 WARANG CITI CAPITAL LETTER KO ++118AD ; mapped ; 118CD # 7.0 WARANG CITI CAPITAL LETTER ENY ++118AE ; mapped ; 118CE # 7.0 WARANG CITI CAPITAL LETTER YUJ ++118AF ; mapped ; 118CF # 7.0 WARANG CITI CAPITAL LETTER UC ++118B0 ; mapped ; 118D0 # 7.0 WARANG CITI CAPITAL LETTER ENN ++118B1 ; mapped ; 118D1 # 7.0 WARANG CITI CAPITAL LETTER ODD ++118B2 ; mapped ; 118D2 # 7.0 WARANG CITI CAPITAL LETTER TTE ++118B3 ; mapped ; 118D3 # 7.0 WARANG CITI CAPITAL LETTER NUNG ++118B4 ; mapped ; 118D4 # 7.0 WARANG CITI CAPITAL LETTER DA ++118B5 ; mapped ; 118D5 # 7.0 WARANG CITI CAPITAL LETTER AT ++118B6 ; mapped ; 118D6 # 7.0 WARANG CITI CAPITAL LETTER AM ++118B7 ; mapped ; 118D7 # 7.0 WARANG CITI CAPITAL LETTER BU ++118B8 ; mapped ; 118D8 # 7.0 WARANG CITI CAPITAL LETTER PU ++118B9 ; mapped ; 118D9 # 7.0 WARANG CITI CAPITAL LETTER HIYO ++118BA ; mapped ; 118DA # 7.0 WARANG CITI CAPITAL LETTER HOLO ++118BB ; mapped ; 118DB # 7.0 WARANG CITI CAPITAL LETTER HORR ++118BC ; mapped ; 118DC # 7.0 WARANG CITI CAPITAL LETTER HAR ++118BD ; mapped ; 118DD # 7.0 WARANG CITI CAPITAL LETTER SSUU ++118BE ; mapped ; 118DE # 7.0 WARANG CITI CAPITAL LETTER SII ++118BF ; mapped ; 118DF # 7.0 WARANG CITI CAPITAL LETTER VIYO ++118C0..118E9 ; valid # 7.0 
WARANG CITI SMALL LETTER NGAA..WARANG CITI DIGIT NINE ++118EA..118F2 ; valid ; ; NV8 # 7.0 WARANG CITI NUMBER TEN..WARANG CITI NUMBER NINETY ++118F3..118FE ; disallowed # NA .. ++118FF ; valid # 7.0 WARANG CITI OM ++11900..119FF ; disallowed # NA .. ++11A00..11A3E ; valid # 10.0 ZANABAZAR SQUARE LETTER A..ZANABAZAR SQUARE CLUSTER-FINAL LETTER VA ++11A3F..11A46 ; valid ; ; NV8 # 10.0 ZANABAZAR SQUARE INITIAL HEAD MARK..ZANABAZAR SQUARE CLOSING DOUBLE-LINED HEAD MARK ++11A47 ; valid # 10.0 ZANABAZAR SQUARE SUBJOINER ++11A48..11A4F ; disallowed # NA .. ++11A50..11A83 ; valid # 10.0 SOYOMBO LETTER A..SOYOMBO LETTER KSSA ++11A84..11A85 ; disallowed # NA .. ++11A86..11A99 ; valid # 10.0 SOYOMBO CLUSTER-INITIAL LETTER RA..SOYOMBO SUBJOINER ++11A9A..11A9C ; valid ; ; NV8 # 10.0 SOYOMBO MARK TSHEG..SOYOMBO MARK DOUBLE SHAD ++11A9D ; disallowed # NA ++11A9E..11AA2 ; valid ; ; NV8 # 10.0 SOYOMBO HEAD MARK WITH MOON AND SUN AND TRIPLE FLAME..SOYOMBO TERMINAL MARK-2 ++11AA3..11ABF ; disallowed # NA .. ++11AC0..11AF8 ; valid # 7.0 PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL ++11AF9..11BFF ; disallowed # NA .. ++11C00..11C08 ; valid # 9.0 BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L ++11C09 ; disallowed # NA ++11C0A..11C36 ; valid # 9.0 BHAIKSUKI LETTER E..BHAIKSUKI VOWEL SIGN VOCALIC L ++11C37 ; disallowed # NA ++11C38..11C40 ; valid # 9.0 BHAIKSUKI VOWEL SIGN E..BHAIKSUKI SIGN AVAGRAHA ++11C41..11C45 ; valid ; ; NV8 # 9.0 BHAIKSUKI DANDA..BHAIKSUKI GAP FILLER-2 ++11C46..11C4F ; disallowed # NA .. ++11C50..11C59 ; valid # 9.0 BHAIKSUKI DIGIT ZERO..BHAIKSUKI DIGIT NINE ++11C5A..11C6C ; valid ; ; NV8 # 9.0 BHAIKSUKI NUMBER ONE..BHAIKSUKI HUNDREDS UNIT MARK ++11C6D..11C6F ; disallowed # NA .. ++11C70..11C71 ; valid ; ; NV8 # 9.0 MARCHEN HEAD MARK..MARCHEN MARK SHAD ++11C72..11C8F ; valid # 9.0 MARCHEN LETTER KA..MARCHEN LETTER A ++11C90..11C91 ; disallowed # NA .. 
++11C92..11CA7 ; valid # 9.0 MARCHEN SUBJOINED LETTER KA..MARCHEN SUBJOINED LETTER ZA ++11CA8 ; disallowed # NA ++11CA9..11CB6 ; valid # 9.0 MARCHEN SUBJOINED LETTER YA..MARCHEN SIGN CANDRABINDU ++11CB7..11CFF ; disallowed # NA .. ++11D00..11D06 ; valid # 10.0 MASARAM GONDI LETTER A..MASARAM GONDI LETTER E ++11D07 ; disallowed # NA ++11D08..11D09 ; valid # 10.0 MASARAM GONDI LETTER AI..MASARAM GONDI LETTER O ++11D0A ; disallowed # NA ++11D0B..11D36 ; valid # 10.0 MASARAM GONDI LETTER AU..MASARAM GONDI VOWEL SIGN VOCALIC R ++11D37..11D39 ; disallowed # NA .. ++11D3A ; valid # 10.0 MASARAM GONDI VOWEL SIGN E ++11D3B ; disallowed # NA ++11D3C..11D3D ; valid # 10.0 MASARAM GONDI VOWEL SIGN AI..MASARAM GONDI VOWEL SIGN O ++11D3E ; disallowed # NA ++11D3F..11D47 ; valid # 10.0 MASARAM GONDI VOWEL SIGN AU..MASARAM GONDI RA-KARA ++11D48..11D4F ; disallowed # NA .. ++11D50..11D59 ; valid # 10.0 MASARAM GONDI DIGIT ZERO..MASARAM GONDI DIGIT NINE ++11D5A..11FFF ; disallowed # NA .. ++12000..1236E ; valid # 5.0 CUNEIFORM SIGN A..CUNEIFORM SIGN ZUM ++1236F..12398 ; valid # 7.0 CUNEIFORM SIGN KAP ELAMITE..CUNEIFORM SIGN UM TIMES ME ++12399 ; valid # 8.0 CUNEIFORM SIGN U U ++1239A..123FF ; disallowed # NA .. ++12400..12462 ; valid ; ; NV8 # 5.0 CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN OLD ASSYRIAN ONE QUARTER ++12463..1246E ; valid ; ; NV8 # 7.0 CUNEIFORM NUMERIC SIGN ONE QUARTER GUR..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM ++1246F ; disallowed # NA ++12470..12473 ; valid ; ; NV8 # 5.0 CUNEIFORM PUNCTUATION SIGN OLD ASSYRIAN WORD DIVIDER..CUNEIFORM PUNCTUATION SIGN DIAGONAL TRICOLON ++12474 ; valid ; ; NV8 # 7.0 CUNEIFORM PUNCTUATION SIGN DIAGONAL QUADCOLON ++12475..1247F ; disallowed # NA .. ++12480..12543 ; valid # 8.0 CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU ++12544..12FFF ; disallowed # NA .. ++13000..1342E ; valid # 5.2 EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH AA032 ++1342F..143FF ; disallowed # NA .. 
++14400..14646 ; valid # 8.0 ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530 ++14647..167FF ; disallowed # NA .. ++16800..16A38 ; valid # 6.0 BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ ++16A39..16A3F ; disallowed # NA .. ++16A40..16A5E ; valid # 7.0 MRO LETTER TA..MRO LETTER TEK ++16A5F ; disallowed # NA ++16A60..16A69 ; valid # 7.0 MRO DIGIT ZERO..MRO DIGIT NINE ++16A6A..16A6D ; disallowed # NA .. ++16A6E..16A6F ; valid ; ; NV8 # 7.0 MRO DANDA..MRO DOUBLE DANDA ++16A70..16ACF ; disallowed # NA .. ++16AD0..16AED ; valid # 7.0 BASSA VAH LETTER ENNI..BASSA VAH LETTER I ++16AEE..16AEF ; disallowed # NA .. ++16AF0..16AF4 ; valid # 7.0 BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE ++16AF5 ; valid ; ; NV8 # 7.0 BASSA VAH FULL STOP ++16AF6..16AFF ; disallowed # NA .. ++16B00..16B36 ; valid # 7.0 PAHAWH HMONG VOWEL KEEB..PAHAWH HMONG MARK CIM TAUM ++16B37..16B3F ; valid ; ; NV8 # 7.0 PAHAWH HMONG SIGN VOS THOM..PAHAWH HMONG SIGN XYEEM FAIB ++16B40..16B43 ; valid # 7.0 PAHAWH HMONG SIGN VOS SEEV..PAHAWH HMONG SIGN IB YAM ++16B44..16B45 ; valid ; ; NV8 # 7.0 PAHAWH HMONG SIGN XAUS..PAHAWH HMONG SIGN CIM TSOV ROG ++16B46..16B4F ; disallowed # NA .. ++16B50..16B59 ; valid # 7.0 PAHAWH HMONG DIGIT ZERO..PAHAWH HMONG DIGIT NINE ++16B5A ; disallowed # NA ++16B5B..16B61 ; valid ; ; NV8 # 7.0 PAHAWH HMONG NUMBER TENS..PAHAWH HMONG NUMBER TRILLIONS ++16B62 ; disallowed # NA ++16B63..16B77 ; valid # 7.0 PAHAWH HMONG SIGN VOS LUB..PAHAWH HMONG SIGN CIM NRES TOS ++16B78..16B7C ; disallowed # NA .. ++16B7D..16B8F ; valid # 7.0 PAHAWH HMONG CLAN SIGN TSHEEJ..PAHAWH HMONG CLAN SIGN VWJ ++16B90..16EFF ; disallowed # NA .. ++16F00..16F44 ; valid # 6.1 MIAO LETTER PA..MIAO LETTER HHA ++16F45..16F4F ; disallowed # NA .. ++16F50..16F7E ; valid # 6.1 MIAO LETTER NASALIZATION..MIAO VOWEL SIGN NG ++16F7F..16F8E ; disallowed # NA .. ++16F8F..16F9F ; valid # 6.1 MIAO TONE RIGHT..MIAO LETTER REFORMED TONE-8 ++16FA0..16FDF ; disallowed # NA .. 
++16FE0 ; valid # 9.0 TANGUT ITERATION MARK ++16FE1 ; valid # 10.0 NUSHU ITERATION MARK ++16FE2..16FFF ; disallowed # NA .. ++17000..187EC ; valid # 9.0 TANGUT IDEOGRAPH-17000..TANGUT IDEOGRAPH-187EC ++187ED..187FF ; disallowed # NA .. ++18800..18AF2 ; valid # 9.0 TANGUT COMPONENT-001..TANGUT COMPONENT-755 ++18AF3..1AFFF ; disallowed # NA .. ++1B000..1B001 ; valid # 6.0 KATAKANA LETTER ARCHAIC E..HIRAGANA LETTER ARCHAIC YE ++1B002..1B11E ; valid # 10.0 HENTAIGANA LETTER A-1..HENTAIGANA LETTER N-MU-MO-2 ++1B11F..1B16F ; disallowed # NA .. ++1B170..1B2FB ; valid # 10.0 NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB ++1B2FC..1BBFF ; disallowed # NA .. ++1BC00..1BC6A ; valid # 7.0 DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M ++1BC6B..1BC6F ; disallowed # NA .. ++1BC70..1BC7C ; valid # 7.0 DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK ++1BC7D..1BC7F ; disallowed # NA .. ++1BC80..1BC88 ; valid # 7.0 DUPLOYAN AFFIX HIGH ACUTE..DUPLOYAN AFFIX HIGH VERTICAL ++1BC89..1BC8F ; disallowed # NA .. ++1BC90..1BC99 ; valid # 7.0 DUPLOYAN AFFIX LOW ACUTE..DUPLOYAN AFFIX LOW ARROW ++1BC9A..1BC9B ; disallowed # NA .. ++1BC9C ; valid ; ; NV8 # 7.0 DUPLOYAN SIGN O WITH CROSS ++1BC9D..1BC9E ; valid # 7.0 DUPLOYAN THICK LETTER SELECTOR..DUPLOYAN DOUBLE MARK ++1BC9F ; valid ; ; NV8 # 7.0 DUPLOYAN PUNCTUATION CHINOOK FULL STOP ++1BCA0..1BCA3 ; ignored # 7.0 SHORTHAND FORMAT LETTER OVERLAP..SHORTHAND FORMAT UP STEP ++1BCA4..1CFFF ; disallowed # NA .. ++1D000..1D0F5 ; valid ; ; NV8 # 3.1 BYZANTINE MUSICAL SYMBOL PSILI..BYZANTINE MUSICAL SYMBOL GORGON NEO KATO ++1D0F6..1D0FF ; disallowed # NA .. ++1D100..1D126 ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL SINGLE BARLINE..MUSICAL SYMBOL DRUM CLEF-2 ++1D127..1D128 ; disallowed # NA .. 
++1D129 ; valid ; ; NV8 # 5.1 MUSICAL SYMBOL MULTIPLE MEASURE REST ++1D12A..1D15D ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL DOUBLE SHARP..MUSICAL SYMBOL WHOLE NOTE ++1D15E ; mapped ; 1D157 1D165 # 3.1 MUSICAL SYMBOL HALF NOTE ++1D15F ; mapped ; 1D158 1D165 # 3.1 MUSICAL SYMBOL QUARTER NOTE ++1D160 ; mapped ; 1D158 1D165 1D16E #3.1 MUSICAL SYMBOL EIGHTH NOTE ++1D161 ; mapped ; 1D158 1D165 1D16F #3.1 MUSICAL SYMBOL SIXTEENTH NOTE ++1D162 ; mapped ; 1D158 1D165 1D170 #3.1 MUSICAL SYMBOL THIRTY-SECOND NOTE ++1D163 ; mapped ; 1D158 1D165 1D171 #3.1 MUSICAL SYMBOL SIXTY-FOURTH NOTE ++1D164 ; mapped ; 1D158 1D165 1D172 #3.1 MUSICAL SYMBOL ONE HUNDRED TWENTY-EIGHTH NOTE ++1D165..1D172 ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL COMBINING STEM..MUSICAL SYMBOL COMBINING FLAG-5 ++1D173..1D17A ; disallowed # 3.1 MUSICAL SYMBOL BEGIN BEAM..MUSICAL SYMBOL END PHRASE ++1D17B..1D1BA ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL COMBINING ACCENT..MUSICAL SYMBOL SEMIBREVIS BLACK ++1D1BB ; mapped ; 1D1B9 1D165 # 3.1 MUSICAL SYMBOL MINIMA ++1D1BC ; mapped ; 1D1BA 1D165 # 3.1 MUSICAL SYMBOL MINIMA BLACK ++1D1BD ; mapped ; 1D1B9 1D165 1D16E #3.1 MUSICAL SYMBOL SEMIMINIMA WHITE ++1D1BE ; mapped ; 1D1BA 1D165 1D16E #3.1 MUSICAL SYMBOL SEMIMINIMA BLACK ++1D1BF ; mapped ; 1D1B9 1D165 1D16F #3.1 MUSICAL SYMBOL FUSA WHITE ++1D1C0 ; mapped ; 1D1BA 1D165 1D16F #3.1 MUSICAL SYMBOL FUSA BLACK ++1D1C1..1D1DD ; valid ; ; NV8 # 3.1 MUSICAL SYMBOL LONGA PERFECTA REST..MUSICAL SYMBOL PES SUBPUNCTIS ++1D1DE..1D1E8 ; valid ; ; NV8 # 8.0 MUSICAL SYMBOL KIEVAN C CLEF..MUSICAL SYMBOL KIEVAN FLAT SIGN ++1D1E9..1D1FF ; disallowed # NA .. ++1D200..1D245 ; valid ; ; NV8 # 4.1 GREEK VOCAL NOTATION SYMBOL-1..GREEK MUSICAL LEIMMA ++1D246..1D2FF ; disallowed # NA .. ++1D300..1D356 ; valid ; ; NV8 # 4.0 MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING ++1D357..1D35F ; disallowed # NA .. ++1D360..1D371 ; valid ; ; NV8 # 5.0 COUNTING ROD UNIT DIGIT ONE..COUNTING ROD TENS DIGIT NINE ++1D372..1D3FF ; disallowed # NA .. 
++1D400 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD CAPITAL A ++1D401 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD CAPITAL B ++1D402 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD CAPITAL C ++1D403 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD CAPITAL D ++1D404 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD CAPITAL E ++1D405 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD CAPITAL F ++1D406 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD CAPITAL G ++1D407 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD CAPITAL H ++1D408 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD CAPITAL I ++1D409 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD CAPITAL J ++1D40A ; mapped ; 006B # 3.1 MATHEMATICAL BOLD CAPITAL K ++1D40B ; mapped ; 006C # 3.1 MATHEMATICAL BOLD CAPITAL L ++1D40C ; mapped ; 006D # 3.1 MATHEMATICAL BOLD CAPITAL M ++1D40D ; mapped ; 006E # 3.1 MATHEMATICAL BOLD CAPITAL N ++1D40E ; mapped ; 006F # 3.1 MATHEMATICAL BOLD CAPITAL O ++1D40F ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD CAPITAL P ++1D410 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD CAPITAL Q ++1D411 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD CAPITAL R ++1D412 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD CAPITAL S ++1D413 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD CAPITAL T ++1D414 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD CAPITAL U ++1D415 ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD CAPITAL V ++1D416 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD CAPITAL W ++1D417 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD CAPITAL X ++1D418 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD CAPITAL Y ++1D419 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD CAPITAL Z ++1D41A ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD SMALL A ++1D41B ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD SMALL B ++1D41C ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD SMALL C ++1D41D ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD SMALL D ++1D41E ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD SMALL E ++1D41F ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD SMALL F ++1D420 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD SMALL G ++1D421 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD SMALL H ++1D422 ; mapped ; 0069 # 3.1 MATHEMATICAL 
BOLD SMALL I ++1D423 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD SMALL J ++1D424 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD SMALL K ++1D425 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD SMALL L ++1D426 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD SMALL M ++1D427 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD SMALL N ++1D428 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD SMALL O ++1D429 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD SMALL P ++1D42A ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD SMALL Q ++1D42B ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD SMALL R ++1D42C ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD SMALL S ++1D42D ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD SMALL T ++1D42E ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD SMALL U ++1D42F ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD SMALL V ++1D430 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD SMALL W ++1D431 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD SMALL X ++1D432 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD SMALL Y ++1D433 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD SMALL Z ++1D434 ; mapped ; 0061 # 3.1 MATHEMATICAL ITALIC CAPITAL A ++1D435 ; mapped ; 0062 # 3.1 MATHEMATICAL ITALIC CAPITAL B ++1D436 ; mapped ; 0063 # 3.1 MATHEMATICAL ITALIC CAPITAL C ++1D437 ; mapped ; 0064 # 3.1 MATHEMATICAL ITALIC CAPITAL D ++1D438 ; mapped ; 0065 # 3.1 MATHEMATICAL ITALIC CAPITAL E ++1D439 ; mapped ; 0066 # 3.1 MATHEMATICAL ITALIC CAPITAL F ++1D43A ; mapped ; 0067 # 3.1 MATHEMATICAL ITALIC CAPITAL G ++1D43B ; mapped ; 0068 # 3.1 MATHEMATICAL ITALIC CAPITAL H ++1D43C ; mapped ; 0069 # 3.1 MATHEMATICAL ITALIC CAPITAL I ++1D43D ; mapped ; 006A # 3.1 MATHEMATICAL ITALIC CAPITAL J ++1D43E ; mapped ; 006B # 3.1 MATHEMATICAL ITALIC CAPITAL K ++1D43F ; mapped ; 006C # 3.1 MATHEMATICAL ITALIC CAPITAL L ++1D440 ; mapped ; 006D # 3.1 MATHEMATICAL ITALIC CAPITAL M ++1D441 ; mapped ; 006E # 3.1 MATHEMATICAL ITALIC CAPITAL N ++1D442 ; mapped ; 006F # 3.1 MATHEMATICAL ITALIC CAPITAL O ++1D443 ; mapped ; 0070 # 3.1 MATHEMATICAL ITALIC CAPITAL P ++1D444 ; mapped ; 0071 # 3.1 MATHEMATICAL ITALIC CAPITAL Q ++1D445 ; 
mapped ; 0072 # 3.1 MATHEMATICAL ITALIC CAPITAL R ++1D446 ; mapped ; 0073 # 3.1 MATHEMATICAL ITALIC CAPITAL S ++1D447 ; mapped ; 0074 # 3.1 MATHEMATICAL ITALIC CAPITAL T ++1D448 ; mapped ; 0075 # 3.1 MATHEMATICAL ITALIC CAPITAL U ++1D449 ; mapped ; 0076 # 3.1 MATHEMATICAL ITALIC CAPITAL V ++1D44A ; mapped ; 0077 # 3.1 MATHEMATICAL ITALIC CAPITAL W ++1D44B ; mapped ; 0078 # 3.1 MATHEMATICAL ITALIC CAPITAL X ++1D44C ; mapped ; 0079 # 3.1 MATHEMATICAL ITALIC CAPITAL Y ++1D44D ; mapped ; 007A # 3.1 MATHEMATICAL ITALIC CAPITAL Z ++1D44E ; mapped ; 0061 # 3.1 MATHEMATICAL ITALIC SMALL A ++1D44F ; mapped ; 0062 # 3.1 MATHEMATICAL ITALIC SMALL B ++1D450 ; mapped ; 0063 # 3.1 MATHEMATICAL ITALIC SMALL C ++1D451 ; mapped ; 0064 # 3.1 MATHEMATICAL ITALIC SMALL D ++1D452 ; mapped ; 0065 # 3.1 MATHEMATICAL ITALIC SMALL E ++1D453 ; mapped ; 0066 # 3.1 MATHEMATICAL ITALIC SMALL F ++1D454 ; mapped ; 0067 # 3.1 MATHEMATICAL ITALIC SMALL G ++1D455 ; disallowed # NA ++1D456 ; mapped ; 0069 # 3.1 MATHEMATICAL ITALIC SMALL I ++1D457 ; mapped ; 006A # 3.1 MATHEMATICAL ITALIC SMALL J ++1D458 ; mapped ; 006B # 3.1 MATHEMATICAL ITALIC SMALL K ++1D459 ; mapped ; 006C # 3.1 MATHEMATICAL ITALIC SMALL L ++1D45A ; mapped ; 006D # 3.1 MATHEMATICAL ITALIC SMALL M ++1D45B ; mapped ; 006E # 3.1 MATHEMATICAL ITALIC SMALL N ++1D45C ; mapped ; 006F # 3.1 MATHEMATICAL ITALIC SMALL O ++1D45D ; mapped ; 0070 # 3.1 MATHEMATICAL ITALIC SMALL P ++1D45E ; mapped ; 0071 # 3.1 MATHEMATICAL ITALIC SMALL Q ++1D45F ; mapped ; 0072 # 3.1 MATHEMATICAL ITALIC SMALL R ++1D460 ; mapped ; 0073 # 3.1 MATHEMATICAL ITALIC SMALL S ++1D461 ; mapped ; 0074 # 3.1 MATHEMATICAL ITALIC SMALL T ++1D462 ; mapped ; 0075 # 3.1 MATHEMATICAL ITALIC SMALL U ++1D463 ; mapped ; 0076 # 3.1 MATHEMATICAL ITALIC SMALL V ++1D464 ; mapped ; 0077 # 3.1 MATHEMATICAL ITALIC SMALL W ++1D465 ; mapped ; 0078 # 3.1 MATHEMATICAL ITALIC SMALL X ++1D466 ; mapped ; 0079 # 3.1 MATHEMATICAL ITALIC SMALL Y ++1D467 ; mapped ; 007A # 3.1 MATHEMATICAL ITALIC 
SMALL Z ++1D468 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL A ++1D469 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL B ++1D46A ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL C ++1D46B ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL D ++1D46C ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL E ++1D46D ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL F ++1D46E ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL G ++1D46F ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL H ++1D470 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL I ++1D471 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL J ++1D472 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL K ++1D473 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL L ++1D474 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL M ++1D475 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL N ++1D476 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL O ++1D477 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL P ++1D478 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL Q ++1D479 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL R ++1D47A ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL S ++1D47B ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL T ++1D47C ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL U ++1D47D ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL V ++1D47E ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL W ++1D47F ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL X ++1D480 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL Y ++1D481 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL Z ++1D482 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD ITALIC SMALL A ++1D483 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD ITALIC SMALL B ++1D484 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD ITALIC SMALL C ++1D485 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD ITALIC SMALL D ++1D486 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD 
ITALIC SMALL E ++1D487 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD ITALIC SMALL F ++1D488 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD ITALIC SMALL G ++1D489 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD ITALIC SMALL H ++1D48A ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD ITALIC SMALL I ++1D48B ; mapped ; 006A # 3.1 MATHEMATICAL BOLD ITALIC SMALL J ++1D48C ; mapped ; 006B # 3.1 MATHEMATICAL BOLD ITALIC SMALL K ++1D48D ; mapped ; 006C # 3.1 MATHEMATICAL BOLD ITALIC SMALL L ++1D48E ; mapped ; 006D # 3.1 MATHEMATICAL BOLD ITALIC SMALL M ++1D48F ; mapped ; 006E # 3.1 MATHEMATICAL BOLD ITALIC SMALL N ++1D490 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD ITALIC SMALL O ++1D491 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD ITALIC SMALL P ++1D492 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD ITALIC SMALL Q ++1D493 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD ITALIC SMALL R ++1D494 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD ITALIC SMALL S ++1D495 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD ITALIC SMALL T ++1D496 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD ITALIC SMALL U ++1D497 ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD ITALIC SMALL V ++1D498 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD ITALIC SMALL W ++1D499 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD ITALIC SMALL X ++1D49A ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD ITALIC SMALL Y ++1D49B ; mapped ; 007A # 3.1 MATHEMATICAL BOLD ITALIC SMALL Z ++1D49C ; mapped ; 0061 # 3.1 MATHEMATICAL SCRIPT CAPITAL A ++1D49D ; disallowed # NA ++1D49E ; mapped ; 0063 # 3.1 MATHEMATICAL SCRIPT CAPITAL C ++1D49F ; mapped ; 0064 # 3.1 MATHEMATICAL SCRIPT CAPITAL D ++1D4A0..1D4A1 ; disallowed # NA .. ++1D4A2 ; mapped ; 0067 # 3.1 MATHEMATICAL SCRIPT CAPITAL G ++1D4A3..1D4A4 ; disallowed # NA .. ++1D4A5 ; mapped ; 006A # 3.1 MATHEMATICAL SCRIPT CAPITAL J ++1D4A6 ; mapped ; 006B # 3.1 MATHEMATICAL SCRIPT CAPITAL K ++1D4A7..1D4A8 ; disallowed # NA .. 
++1D4A9 ; mapped ; 006E # 3.1 MATHEMATICAL SCRIPT CAPITAL N ++1D4AA ; mapped ; 006F # 3.1 MATHEMATICAL SCRIPT CAPITAL O ++1D4AB ; mapped ; 0070 # 3.1 MATHEMATICAL SCRIPT CAPITAL P ++1D4AC ; mapped ; 0071 # 3.1 MATHEMATICAL SCRIPT CAPITAL Q ++1D4AD ; disallowed # NA ++1D4AE ; mapped ; 0073 # 3.1 MATHEMATICAL SCRIPT CAPITAL S ++1D4AF ; mapped ; 0074 # 3.1 MATHEMATICAL SCRIPT CAPITAL T ++1D4B0 ; mapped ; 0075 # 3.1 MATHEMATICAL SCRIPT CAPITAL U ++1D4B1 ; mapped ; 0076 # 3.1 MATHEMATICAL SCRIPT CAPITAL V ++1D4B2 ; mapped ; 0077 # 3.1 MATHEMATICAL SCRIPT CAPITAL W ++1D4B3 ; mapped ; 0078 # 3.1 MATHEMATICAL SCRIPT CAPITAL X ++1D4B4 ; mapped ; 0079 # 3.1 MATHEMATICAL SCRIPT CAPITAL Y ++1D4B5 ; mapped ; 007A # 3.1 MATHEMATICAL SCRIPT CAPITAL Z ++1D4B6 ; mapped ; 0061 # 3.1 MATHEMATICAL SCRIPT SMALL A ++1D4B7 ; mapped ; 0062 # 3.1 MATHEMATICAL SCRIPT SMALL B ++1D4B8 ; mapped ; 0063 # 3.1 MATHEMATICAL SCRIPT SMALL C ++1D4B9 ; mapped ; 0064 # 3.1 MATHEMATICAL SCRIPT SMALL D ++1D4BA ; disallowed # NA ++1D4BB ; mapped ; 0066 # 3.1 MATHEMATICAL SCRIPT SMALL F ++1D4BC ; disallowed # NA ++1D4BD ; mapped ; 0068 # 3.1 MATHEMATICAL SCRIPT SMALL H ++1D4BE ; mapped ; 0069 # 3.1 MATHEMATICAL SCRIPT SMALL I ++1D4BF ; mapped ; 006A # 3.1 MATHEMATICAL SCRIPT SMALL J ++1D4C0 ; mapped ; 006B # 3.1 MATHEMATICAL SCRIPT SMALL K ++1D4C1 ; mapped ; 006C # 4.0 MATHEMATICAL SCRIPT SMALL L ++1D4C2 ; mapped ; 006D # 3.1 MATHEMATICAL SCRIPT SMALL M ++1D4C3 ; mapped ; 006E # 3.1 MATHEMATICAL SCRIPT SMALL N ++1D4C4 ; disallowed # NA ++1D4C5 ; mapped ; 0070 # 3.1 MATHEMATICAL SCRIPT SMALL P ++1D4C6 ; mapped ; 0071 # 3.1 MATHEMATICAL SCRIPT SMALL Q ++1D4C7 ; mapped ; 0072 # 3.1 MATHEMATICAL SCRIPT SMALL R ++1D4C8 ; mapped ; 0073 # 3.1 MATHEMATICAL SCRIPT SMALL S ++1D4C9 ; mapped ; 0074 # 3.1 MATHEMATICAL SCRIPT SMALL T ++1D4CA ; mapped ; 0075 # 3.1 MATHEMATICAL SCRIPT SMALL U ++1D4CB ; mapped ; 0076 # 3.1 MATHEMATICAL SCRIPT SMALL V ++1D4CC ; mapped ; 0077 # 3.1 MATHEMATICAL SCRIPT SMALL W ++1D4CD ; 
mapped ; 0078 # 3.1 MATHEMATICAL SCRIPT SMALL X ++1D4CE ; mapped ; 0079 # 3.1 MATHEMATICAL SCRIPT SMALL Y ++1D4CF ; mapped ; 007A # 3.1 MATHEMATICAL SCRIPT SMALL Z ++1D4D0 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL A ++1D4D1 ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL B ++1D4D2 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL C ++1D4D3 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL D ++1D4D4 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL E ++1D4D5 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL F ++1D4D6 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL G ++1D4D7 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL H ++1D4D8 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL I ++1D4D9 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL J ++1D4DA ; mapped ; 006B # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL K ++1D4DB ; mapped ; 006C # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL L ++1D4DC ; mapped ; 006D # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL M ++1D4DD ; mapped ; 006E # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL N ++1D4DE ; mapped ; 006F # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL O ++1D4DF ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL P ++1D4E0 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL Q ++1D4E1 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL R ++1D4E2 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL S ++1D4E3 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL T ++1D4E4 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL U ++1D4E5 ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL V ++1D4E6 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL W ++1D4E7 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL X ++1D4E8 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL Y ++1D4E9 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD SCRIPT CAPITAL Z ++1D4EA ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL A ++1D4EB ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL B ++1D4EC ; mapped ; 
0063 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL C ++1D4ED ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL D ++1D4EE ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL E ++1D4EF ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL F ++1D4F0 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL G ++1D4F1 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL H ++1D4F2 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL I ++1D4F3 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD SCRIPT SMALL J ++1D4F4 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD SCRIPT SMALL K ++1D4F5 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD SCRIPT SMALL L ++1D4F6 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD SCRIPT SMALL M ++1D4F7 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD SCRIPT SMALL N ++1D4F8 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD SCRIPT SMALL O ++1D4F9 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL P ++1D4FA ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL Q ++1D4FB ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL R ++1D4FC ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL S ++1D4FD ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL T ++1D4FE ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL U ++1D4FF ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL V ++1D500 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL W ++1D501 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL X ++1D502 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD SCRIPT SMALL Y ++1D503 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD SCRIPT SMALL Z ++1D504 ; mapped ; 0061 # 3.1 MATHEMATICAL FRAKTUR CAPITAL A ++1D505 ; mapped ; 0062 # 3.1 MATHEMATICAL FRAKTUR CAPITAL B ++1D506 ; disallowed # NA ++1D507 ; mapped ; 0064 # 3.1 MATHEMATICAL FRAKTUR CAPITAL D ++1D508 ; mapped ; 0065 # 3.1 MATHEMATICAL FRAKTUR CAPITAL E ++1D509 ; mapped ; 0066 # 3.1 MATHEMATICAL FRAKTUR CAPITAL F ++1D50A ; mapped ; 0067 # 3.1 MATHEMATICAL FRAKTUR CAPITAL G ++1D50B..1D50C ; disallowed # NA .. 
++1D50D ; mapped ; 006A # 3.1 MATHEMATICAL FRAKTUR CAPITAL J ++1D50E ; mapped ; 006B # 3.1 MATHEMATICAL FRAKTUR CAPITAL K ++1D50F ; mapped ; 006C # 3.1 MATHEMATICAL FRAKTUR CAPITAL L ++1D510 ; mapped ; 006D # 3.1 MATHEMATICAL FRAKTUR CAPITAL M ++1D511 ; mapped ; 006E # 3.1 MATHEMATICAL FRAKTUR CAPITAL N ++1D512 ; mapped ; 006F # 3.1 MATHEMATICAL FRAKTUR CAPITAL O ++1D513 ; mapped ; 0070 # 3.1 MATHEMATICAL FRAKTUR CAPITAL P ++1D514 ; mapped ; 0071 # 3.1 MATHEMATICAL FRAKTUR CAPITAL Q ++1D515 ; disallowed # NA ++1D516 ; mapped ; 0073 # 3.1 MATHEMATICAL FRAKTUR CAPITAL S ++1D517 ; mapped ; 0074 # 3.1 MATHEMATICAL FRAKTUR CAPITAL T ++1D518 ; mapped ; 0075 # 3.1 MATHEMATICAL FRAKTUR CAPITAL U ++1D519 ; mapped ; 0076 # 3.1 MATHEMATICAL FRAKTUR CAPITAL V ++1D51A ; mapped ; 0077 # 3.1 MATHEMATICAL FRAKTUR CAPITAL W ++1D51B ; mapped ; 0078 # 3.1 MATHEMATICAL FRAKTUR CAPITAL X ++1D51C ; mapped ; 0079 # 3.1 MATHEMATICAL FRAKTUR CAPITAL Y ++1D51D ; disallowed # NA ++1D51E ; mapped ; 0061 # 3.1 MATHEMATICAL FRAKTUR SMALL A ++1D51F ; mapped ; 0062 # 3.1 MATHEMATICAL FRAKTUR SMALL B ++1D520 ; mapped ; 0063 # 3.1 MATHEMATICAL FRAKTUR SMALL C ++1D521 ; mapped ; 0064 # 3.1 MATHEMATICAL FRAKTUR SMALL D ++1D522 ; mapped ; 0065 # 3.1 MATHEMATICAL FRAKTUR SMALL E ++1D523 ; mapped ; 0066 # 3.1 MATHEMATICAL FRAKTUR SMALL F ++1D524 ; mapped ; 0067 # 3.1 MATHEMATICAL FRAKTUR SMALL G ++1D525 ; mapped ; 0068 # 3.1 MATHEMATICAL FRAKTUR SMALL H ++1D526 ; mapped ; 0069 # 3.1 MATHEMATICAL FRAKTUR SMALL I ++1D527 ; mapped ; 006A # 3.1 MATHEMATICAL FRAKTUR SMALL J ++1D528 ; mapped ; 006B # 3.1 MATHEMATICAL FRAKTUR SMALL K ++1D529 ; mapped ; 006C # 3.1 MATHEMATICAL FRAKTUR SMALL L ++1D52A ; mapped ; 006D # 3.1 MATHEMATICAL FRAKTUR SMALL M ++1D52B ; mapped ; 006E # 3.1 MATHEMATICAL FRAKTUR SMALL N ++1D52C ; mapped ; 006F # 3.1 MATHEMATICAL FRAKTUR SMALL O ++1D52D ; mapped ; 0070 # 3.1 MATHEMATICAL FRAKTUR SMALL P ++1D52E ; mapped ; 0071 # 3.1 MATHEMATICAL FRAKTUR SMALL Q ++1D52F ; mapped ; 0072 # 3.1 
MATHEMATICAL FRAKTUR SMALL R ++1D530 ; mapped ; 0073 # 3.1 MATHEMATICAL FRAKTUR SMALL S ++1D531 ; mapped ; 0074 # 3.1 MATHEMATICAL FRAKTUR SMALL T ++1D532 ; mapped ; 0075 # 3.1 MATHEMATICAL FRAKTUR SMALL U ++1D533 ; mapped ; 0076 # 3.1 MATHEMATICAL FRAKTUR SMALL V ++1D534 ; mapped ; 0077 # 3.1 MATHEMATICAL FRAKTUR SMALL W ++1D535 ; mapped ; 0078 # 3.1 MATHEMATICAL FRAKTUR SMALL X ++1D536 ; mapped ; 0079 # 3.1 MATHEMATICAL FRAKTUR SMALL Y ++1D537 ; mapped ; 007A # 3.1 MATHEMATICAL FRAKTUR SMALL Z ++1D538 ; mapped ; 0061 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL A ++1D539 ; mapped ; 0062 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL B ++1D53A ; disallowed # NA ++1D53B ; mapped ; 0064 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL D ++1D53C ; mapped ; 0065 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL E ++1D53D ; mapped ; 0066 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL F ++1D53E ; mapped ; 0067 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL G ++1D53F ; disallowed # NA ++1D540 ; mapped ; 0069 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL I ++1D541 ; mapped ; 006A # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL J ++1D542 ; mapped ; 006B # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL K ++1D543 ; mapped ; 006C # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL L ++1D544 ; mapped ; 006D # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL M ++1D545 ; disallowed # NA ++1D546 ; mapped ; 006F # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL O ++1D547..1D549 ; disallowed # NA .. 
++1D54A ; mapped ; 0073 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL S ++1D54B ; mapped ; 0074 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL T ++1D54C ; mapped ; 0075 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL U ++1D54D ; mapped ; 0076 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL V ++1D54E ; mapped ; 0077 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL W ++1D54F ; mapped ; 0078 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL X ++1D550 ; mapped ; 0079 # 3.1 MATHEMATICAL DOUBLE-STRUCK CAPITAL Y ++1D551 ; disallowed # NA ++1D552 ; mapped ; 0061 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL A ++1D553 ; mapped ; 0062 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL B ++1D554 ; mapped ; 0063 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL C ++1D555 ; mapped ; 0064 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL D ++1D556 ; mapped ; 0065 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL E ++1D557 ; mapped ; 0066 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL F ++1D558 ; mapped ; 0067 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL G ++1D559 ; mapped ; 0068 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL H ++1D55A ; mapped ; 0069 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL I ++1D55B ; mapped ; 006A # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL J ++1D55C ; mapped ; 006B # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL K ++1D55D ; mapped ; 006C # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL L ++1D55E ; mapped ; 006D # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL M ++1D55F ; mapped ; 006E # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL N ++1D560 ; mapped ; 006F # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL O ++1D561 ; mapped ; 0070 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL P ++1D562 ; mapped ; 0071 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL Q ++1D563 ; mapped ; 0072 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL R ++1D564 ; mapped ; 0073 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL S ++1D565 ; mapped ; 0074 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL T ++1D566 ; mapped ; 0075 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL U ++1D567 ; mapped ; 0076 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL V ++1D568 ; mapped ; 0077 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL W ++1D569 ; 
mapped ; 0078 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL X ++1D56A ; mapped ; 0079 # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL Y ++1D56B ; mapped ; 007A # 3.1 MATHEMATICAL DOUBLE-STRUCK SMALL Z ++1D56C ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL A ++1D56D ; mapped ; 0062 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL B ++1D56E ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL C ++1D56F ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL D ++1D570 ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL E ++1D571 ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL F ++1D572 ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL G ++1D573 ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL H ++1D574 ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL I ++1D575 ; mapped ; 006A # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL J ++1D576 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL K ++1D577 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL L ++1D578 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL M ++1D579 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL N ++1D57A ; mapped ; 006F # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL O ++1D57B ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL P ++1D57C ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL Q ++1D57D ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL R ++1D57E ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL S ++1D57F ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL T ++1D580 ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL U ++1D581 ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL V ++1D582 ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL W ++1D583 ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL X ++1D584 ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL Y ++1D585 ; mapped ; 007A # 3.1 MATHEMATICAL BOLD FRAKTUR CAPITAL Z ++1D586 ; mapped ; 0061 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL A ++1D587 ; mapped ; 0062 # 3.1 
MATHEMATICAL BOLD FRAKTUR SMALL B ++1D588 ; mapped ; 0063 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL C ++1D589 ; mapped ; 0064 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL D ++1D58A ; mapped ; 0065 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL E ++1D58B ; mapped ; 0066 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL F ++1D58C ; mapped ; 0067 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL G ++1D58D ; mapped ; 0068 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL H ++1D58E ; mapped ; 0069 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL I ++1D58F ; mapped ; 006A # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL J ++1D590 ; mapped ; 006B # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL K ++1D591 ; mapped ; 006C # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL L ++1D592 ; mapped ; 006D # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL M ++1D593 ; mapped ; 006E # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL N ++1D594 ; mapped ; 006F # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL O ++1D595 ; mapped ; 0070 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL P ++1D596 ; mapped ; 0071 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL Q ++1D597 ; mapped ; 0072 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL R ++1D598 ; mapped ; 0073 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL S ++1D599 ; mapped ; 0074 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL T ++1D59A ; mapped ; 0075 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL U ++1D59B ; mapped ; 0076 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL V ++1D59C ; mapped ; 0077 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL W ++1D59D ; mapped ; 0078 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL X ++1D59E ; mapped ; 0079 # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL Y ++1D59F ; mapped ; 007A # 3.1 MATHEMATICAL BOLD FRAKTUR SMALL Z ++1D5A0 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL A ++1D5A1 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL B ++1D5A2 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL C ++1D5A3 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL D ++1D5A4 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL E ++1D5A5 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL F ++1D5A6 ; mapped ; 0067 # 3.1 MATHEMATICAL 
SANS-SERIF CAPITAL G ++1D5A7 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL H ++1D5A8 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL I ++1D5A9 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF CAPITAL J ++1D5AA ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF CAPITAL K ++1D5AB ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF CAPITAL L ++1D5AC ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF CAPITAL M ++1D5AD ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF CAPITAL N ++1D5AE ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF CAPITAL O ++1D5AF ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL P ++1D5B0 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL Q ++1D5B1 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL R ++1D5B2 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL S ++1D5B3 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL T ++1D5B4 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL U ++1D5B5 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL V ++1D5B6 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL W ++1D5B7 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL X ++1D5B8 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF CAPITAL Y ++1D5B9 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF CAPITAL Z ++1D5BA ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF SMALL A ++1D5BB ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF SMALL B ++1D5BC ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF SMALL C ++1D5BD ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF SMALL D ++1D5BE ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF SMALL E ++1D5BF ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF SMALL F ++1D5C0 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF SMALL G ++1D5C1 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF SMALL H ++1D5C2 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF SMALL I ++1D5C3 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF SMALL J ++1D5C4 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF SMALL K ++1D5C5 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF SMALL L ++1D5C6 ; mapped ; 
006D # 3.1 MATHEMATICAL SANS-SERIF SMALL M ++1D5C7 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF SMALL N ++1D5C8 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF SMALL O ++1D5C9 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF SMALL P ++1D5CA ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF SMALL Q ++1D5CB ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF SMALL R ++1D5CC ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF SMALL S ++1D5CD ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF SMALL T ++1D5CE ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF SMALL U ++1D5CF ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF SMALL V ++1D5D0 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF SMALL W ++1D5D1 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF SMALL X ++1D5D2 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF SMALL Y ++1D5D3 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF SMALL Z ++1D5D4 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL A ++1D5D5 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL B ++1D5D6 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL C ++1D5D7 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL D ++1D5D8 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL E ++1D5D9 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL F ++1D5DA ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL G ++1D5DB ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL H ++1D5DC ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL I ++1D5DD ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL J ++1D5DE ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL K ++1D5DF ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL L ++1D5E0 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL M ++1D5E1 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL N ++1D5E2 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL O ++1D5E3 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL P ++1D5E4 ; mapped ; 0071 # 3.1 MATHEMATICAL 
SANS-SERIF BOLD CAPITAL Q ++1D5E5 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL R ++1D5E6 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL S ++1D5E7 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL T ++1D5E8 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL U ++1D5E9 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL V ++1D5EA ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL W ++1D5EB ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL X ++1D5EC ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL Y ++1D5ED ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL Z ++1D5EE ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL A ++1D5EF ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL B ++1D5F0 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL C ++1D5F1 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL D ++1D5F2 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL E ++1D5F3 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL F ++1D5F4 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL G ++1D5F5 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL H ++1D5F6 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL I ++1D5F7 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL J ++1D5F8 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL K ++1D5F9 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL L ++1D5FA ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL M ++1D5FB ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL N ++1D5FC ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL O ++1D5FD ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL P ++1D5FE ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL Q ++1D5FF ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL R ++1D600 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL S ++1D601 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL T ++1D602 ; 
mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL U ++1D603 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL V ++1D604 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL W ++1D605 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL X ++1D606 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL Y ++1D607 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL Z ++1D608 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL A ++1D609 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL B ++1D60A ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL C ++1D60B ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL D ++1D60C ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL E ++1D60D ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL F ++1D60E ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL G ++1D60F ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL H ++1D610 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL I ++1D611 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL J ++1D612 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL K ++1D613 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL L ++1D614 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL M ++1D615 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL N ++1D616 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL O ++1D617 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL P ++1D618 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL Q ++1D619 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL R ++1D61A ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL S ++1D61B ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL T ++1D61C ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL U ++1D61D ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL V ++1D61E ; mapped ; 0077 # 3.1 MATHEMATICAL 
SANS-SERIF ITALIC CAPITAL W ++1D61F ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL X ++1D620 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL Y ++1D621 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF ITALIC CAPITAL Z ++1D622 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL A ++1D623 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL B ++1D624 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL C ++1D625 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL D ++1D626 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL E ++1D627 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL F ++1D628 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL G ++1D629 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL H ++1D62A ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL I ++1D62B ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL J ++1D62C ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL K ++1D62D ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL L ++1D62E ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL M ++1D62F ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL N ++1D630 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL O ++1D631 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL P ++1D632 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL Q ++1D633 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL R ++1D634 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL S ++1D635 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL T ++1D636 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL U ++1D637 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL V ++1D638 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL W ++1D639 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL X ++1D63A ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF ITALIC SMALL Y ++1D63B ; mapped ; 007A # 3.1 
MATHEMATICAL SANS-SERIF ITALIC SMALL Z ++1D63C ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL A ++1D63D ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL B ++1D63E ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL C ++1D63F ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL D ++1D640 ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL E ++1D641 ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL F ++1D642 ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL G ++1D643 ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL H ++1D644 ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL I ++1D645 ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL J ++1D646 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL K ++1D647 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL L ++1D648 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL M ++1D649 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL N ++1D64A ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL O ++1D64B ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL P ++1D64C ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL Q ++1D64D ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL R ++1D64E ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL S ++1D64F ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL T ++1D650 ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL U ++1D651 ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL V ++1D652 ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL W ++1D653 ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL X ++1D654 ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL Y ++1D655 ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD 
ITALIC CAPITAL Z ++1D656 ; mapped ; 0061 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL A ++1D657 ; mapped ; 0062 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL B ++1D658 ; mapped ; 0063 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL C ++1D659 ; mapped ; 0064 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL D ++1D65A ; mapped ; 0065 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL E ++1D65B ; mapped ; 0066 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL F ++1D65C ; mapped ; 0067 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL G ++1D65D ; mapped ; 0068 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL H ++1D65E ; mapped ; 0069 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL I ++1D65F ; mapped ; 006A # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL J ++1D660 ; mapped ; 006B # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL K ++1D661 ; mapped ; 006C # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL L ++1D662 ; mapped ; 006D # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL M ++1D663 ; mapped ; 006E # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL N ++1D664 ; mapped ; 006F # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL O ++1D665 ; mapped ; 0070 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL P ++1D666 ; mapped ; 0071 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Q ++1D667 ; mapped ; 0072 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL R ++1D668 ; mapped ; 0073 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL S ++1D669 ; mapped ; 0074 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL T ++1D66A ; mapped ; 0075 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL U ++1D66B ; mapped ; 0076 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL V ++1D66C ; mapped ; 0077 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL W ++1D66D ; mapped ; 0078 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL X ++1D66E ; mapped ; 0079 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Y ++1D66F ; mapped ; 007A # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL Z ++1D670 ; mapped ; 0061 # 3.1 MATHEMATICAL MONOSPACE 
CAPITAL A ++1D671 ; mapped ; 0062 # 3.1 MATHEMATICAL MONOSPACE CAPITAL B ++1D672 ; mapped ; 0063 # 3.1 MATHEMATICAL MONOSPACE CAPITAL C ++1D673 ; mapped ; 0064 # 3.1 MATHEMATICAL MONOSPACE CAPITAL D ++1D674 ; mapped ; 0065 # 3.1 MATHEMATICAL MONOSPACE CAPITAL E ++1D675 ; mapped ; 0066 # 3.1 MATHEMATICAL MONOSPACE CAPITAL F ++1D676 ; mapped ; 0067 # 3.1 MATHEMATICAL MONOSPACE CAPITAL G ++1D677 ; mapped ; 0068 # 3.1 MATHEMATICAL MONOSPACE CAPITAL H ++1D678 ; mapped ; 0069 # 3.1 MATHEMATICAL MONOSPACE CAPITAL I ++1D679 ; mapped ; 006A # 3.1 MATHEMATICAL MONOSPACE CAPITAL J ++1D67A ; mapped ; 006B # 3.1 MATHEMATICAL MONOSPACE CAPITAL K ++1D67B ; mapped ; 006C # 3.1 MATHEMATICAL MONOSPACE CAPITAL L ++1D67C ; mapped ; 006D # 3.1 MATHEMATICAL MONOSPACE CAPITAL M ++1D67D ; mapped ; 006E # 3.1 MATHEMATICAL MONOSPACE CAPITAL N ++1D67E ; mapped ; 006F # 3.1 MATHEMATICAL MONOSPACE CAPITAL O ++1D67F ; mapped ; 0070 # 3.1 MATHEMATICAL MONOSPACE CAPITAL P ++1D680 ; mapped ; 0071 # 3.1 MATHEMATICAL MONOSPACE CAPITAL Q ++1D681 ; mapped ; 0072 # 3.1 MATHEMATICAL MONOSPACE CAPITAL R ++1D682 ; mapped ; 0073 # 3.1 MATHEMATICAL MONOSPACE CAPITAL S ++1D683 ; mapped ; 0074 # 3.1 MATHEMATICAL MONOSPACE CAPITAL T ++1D684 ; mapped ; 0075 # 3.1 MATHEMATICAL MONOSPACE CAPITAL U ++1D685 ; mapped ; 0076 # 3.1 MATHEMATICAL MONOSPACE CAPITAL V ++1D686 ; mapped ; 0077 # 3.1 MATHEMATICAL MONOSPACE CAPITAL W ++1D687 ; mapped ; 0078 # 3.1 MATHEMATICAL MONOSPACE CAPITAL X ++1D688 ; mapped ; 0079 # 3.1 MATHEMATICAL MONOSPACE CAPITAL Y ++1D689 ; mapped ; 007A # 3.1 MATHEMATICAL MONOSPACE CAPITAL Z ++1D68A ; mapped ; 0061 # 3.1 MATHEMATICAL MONOSPACE SMALL A ++1D68B ; mapped ; 0062 # 3.1 MATHEMATICAL MONOSPACE SMALL B ++1D68C ; mapped ; 0063 # 3.1 MATHEMATICAL MONOSPACE SMALL C ++1D68D ; mapped ; 0064 # 3.1 MATHEMATICAL MONOSPACE SMALL D ++1D68E ; mapped ; 0065 # 3.1 MATHEMATICAL MONOSPACE SMALL E ++1D68F ; mapped ; 0066 # 3.1 MATHEMATICAL MONOSPACE SMALL F ++1D690 ; mapped ; 0067 # 3.1 MATHEMATICAL 
MONOSPACE SMALL G ++1D691 ; mapped ; 0068 # 3.1 MATHEMATICAL MONOSPACE SMALL H ++1D692 ; mapped ; 0069 # 3.1 MATHEMATICAL MONOSPACE SMALL I ++1D693 ; mapped ; 006A # 3.1 MATHEMATICAL MONOSPACE SMALL J ++1D694 ; mapped ; 006B # 3.1 MATHEMATICAL MONOSPACE SMALL K ++1D695 ; mapped ; 006C # 3.1 MATHEMATICAL MONOSPACE SMALL L ++1D696 ; mapped ; 006D # 3.1 MATHEMATICAL MONOSPACE SMALL M ++1D697 ; mapped ; 006E # 3.1 MATHEMATICAL MONOSPACE SMALL N ++1D698 ; mapped ; 006F # 3.1 MATHEMATICAL MONOSPACE SMALL O ++1D699 ; mapped ; 0070 # 3.1 MATHEMATICAL MONOSPACE SMALL P ++1D69A ; mapped ; 0071 # 3.1 MATHEMATICAL MONOSPACE SMALL Q ++1D69B ; mapped ; 0072 # 3.1 MATHEMATICAL MONOSPACE SMALL R ++1D69C ; mapped ; 0073 # 3.1 MATHEMATICAL MONOSPACE SMALL S ++1D69D ; mapped ; 0074 # 3.1 MATHEMATICAL MONOSPACE SMALL T ++1D69E ; mapped ; 0075 # 3.1 MATHEMATICAL MONOSPACE SMALL U ++1D69F ; mapped ; 0076 # 3.1 MATHEMATICAL MONOSPACE SMALL V ++1D6A0 ; mapped ; 0077 # 3.1 MATHEMATICAL MONOSPACE SMALL W ++1D6A1 ; mapped ; 0078 # 3.1 MATHEMATICAL MONOSPACE SMALL X ++1D6A2 ; mapped ; 0079 # 3.1 MATHEMATICAL MONOSPACE SMALL Y ++1D6A3 ; mapped ; 007A # 3.1 MATHEMATICAL MONOSPACE SMALL Z ++1D6A4 ; mapped ; 0131 # 4.1 MATHEMATICAL ITALIC SMALL DOTLESS I ++1D6A5 ; mapped ; 0237 # 4.1 MATHEMATICAL ITALIC SMALL DOTLESS J ++1D6A6..1D6A7 ; disallowed # NA .. 
++1D6A8 ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD CAPITAL ALPHA ++1D6A9 ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD CAPITAL BETA ++1D6AA ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD CAPITAL GAMMA ++1D6AB ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD CAPITAL DELTA ++1D6AC ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD CAPITAL EPSILON ++1D6AD ; mapped ; 03B6 # 3.1 MATHEMATICAL BOLD CAPITAL ZETA ++1D6AE ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD CAPITAL ETA ++1D6AF ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD CAPITAL THETA ++1D6B0 ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD CAPITAL IOTA ++1D6B1 ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD CAPITAL KAPPA ++1D6B2 ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD CAPITAL LAMDA ++1D6B3 ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD CAPITAL MU ++1D6B4 ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD CAPITAL NU ++1D6B5 ; mapped ; 03BE # 3.1 MATHEMATICAL BOLD CAPITAL XI ++1D6B6 ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD CAPITAL OMICRON ++1D6B7 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD CAPITAL PI ++1D6B8 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD CAPITAL RHO ++1D6B9 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD CAPITAL THETA SYMBOL ++1D6BA ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD CAPITAL SIGMA ++1D6BB ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD CAPITAL TAU ++1D6BC ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD CAPITAL UPSILON ++1D6BD ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD CAPITAL PHI ++1D6BE ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD CAPITAL CHI ++1D6BF ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD CAPITAL PSI ++1D6C0 ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD CAPITAL OMEGA ++1D6C1 ; mapped ; 2207 # 3.1 MATHEMATICAL BOLD NABLA ++1D6C2 ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD SMALL ALPHA ++1D6C3 ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD SMALL BETA ++1D6C4 ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD SMALL GAMMA ++1D6C5 ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD SMALL DELTA ++1D6C6 ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD SMALL EPSILON ++1D6C7 ; mapped ; 03B6 # 3.1 MATHEMATICAL BOLD SMALL ZETA ++1D6C8 ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD 
SMALL ETA ++1D6C9 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD SMALL THETA ++1D6CA ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD SMALL IOTA ++1D6CB ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD SMALL KAPPA ++1D6CC ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD SMALL LAMDA ++1D6CD ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD SMALL MU ++1D6CE ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD SMALL NU ++1D6CF ; mapped ; 03BE # 3.1 MATHEMATICAL BOLD SMALL XI ++1D6D0 ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD SMALL OMICRON ++1D6D1 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD SMALL PI ++1D6D2 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD SMALL RHO ++1D6D3..1D6D4 ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD SMALL FINAL SIGMA..MATHEMATICAL BOLD SMALL SIGMA ++1D6D5 ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD SMALL TAU ++1D6D6 ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD SMALL UPSILON ++1D6D7 ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD SMALL PHI ++1D6D8 ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD SMALL CHI ++1D6D9 ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD SMALL PSI ++1D6DA ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD SMALL OMEGA ++1D6DB ; mapped ; 2202 # 3.1 MATHEMATICAL BOLD PARTIAL DIFFERENTIAL ++1D6DC ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD EPSILON SYMBOL ++1D6DD ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD THETA SYMBOL ++1D6DE ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD KAPPA SYMBOL ++1D6DF ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD PHI SYMBOL ++1D6E0 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD RHO SYMBOL ++1D6E1 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD PI SYMBOL ++1D6E2 ; mapped ; 03B1 # 3.1 MATHEMATICAL ITALIC CAPITAL ALPHA ++1D6E3 ; mapped ; 03B2 # 3.1 MATHEMATICAL ITALIC CAPITAL BETA ++1D6E4 ; mapped ; 03B3 # 3.1 MATHEMATICAL ITALIC CAPITAL GAMMA ++1D6E5 ; mapped ; 03B4 # 3.1 MATHEMATICAL ITALIC CAPITAL DELTA ++1D6E6 ; mapped ; 03B5 # 3.1 MATHEMATICAL ITALIC CAPITAL EPSILON ++1D6E7 ; mapped ; 03B6 # 3.1 MATHEMATICAL ITALIC CAPITAL ZETA ++1D6E8 ; mapped ; 03B7 # 3.1 MATHEMATICAL ITALIC CAPITAL ETA ++1D6E9 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC CAPITAL THETA 
++1D6EA ; mapped ; 03B9 # 3.1 MATHEMATICAL ITALIC CAPITAL IOTA ++1D6EB ; mapped ; 03BA # 3.1 MATHEMATICAL ITALIC CAPITAL KAPPA ++1D6EC ; mapped ; 03BB # 3.1 MATHEMATICAL ITALIC CAPITAL LAMDA ++1D6ED ; mapped ; 03BC # 3.1 MATHEMATICAL ITALIC CAPITAL MU ++1D6EE ; mapped ; 03BD # 3.1 MATHEMATICAL ITALIC CAPITAL NU ++1D6EF ; mapped ; 03BE # 3.1 MATHEMATICAL ITALIC CAPITAL XI ++1D6F0 ; mapped ; 03BF # 3.1 MATHEMATICAL ITALIC CAPITAL OMICRON ++1D6F1 ; mapped ; 03C0 # 3.1 MATHEMATICAL ITALIC CAPITAL PI ++1D6F2 ; mapped ; 03C1 # 3.1 MATHEMATICAL ITALIC CAPITAL RHO ++1D6F3 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC CAPITAL THETA SYMBOL ++1D6F4 ; mapped ; 03C3 # 3.1 MATHEMATICAL ITALIC CAPITAL SIGMA ++1D6F5 ; mapped ; 03C4 # 3.1 MATHEMATICAL ITALIC CAPITAL TAU ++1D6F6 ; mapped ; 03C5 # 3.1 MATHEMATICAL ITALIC CAPITAL UPSILON ++1D6F7 ; mapped ; 03C6 # 3.1 MATHEMATICAL ITALIC CAPITAL PHI ++1D6F8 ; mapped ; 03C7 # 3.1 MATHEMATICAL ITALIC CAPITAL CHI ++1D6F9 ; mapped ; 03C8 # 3.1 MATHEMATICAL ITALIC CAPITAL PSI ++1D6FA ; mapped ; 03C9 # 3.1 MATHEMATICAL ITALIC CAPITAL OMEGA ++1D6FB ; mapped ; 2207 # 3.1 MATHEMATICAL ITALIC NABLA ++1D6FC ; mapped ; 03B1 # 3.1 MATHEMATICAL ITALIC SMALL ALPHA ++1D6FD ; mapped ; 03B2 # 3.1 MATHEMATICAL ITALIC SMALL BETA ++1D6FE ; mapped ; 03B3 # 3.1 MATHEMATICAL ITALIC SMALL GAMMA ++1D6FF ; mapped ; 03B4 # 3.1 MATHEMATICAL ITALIC SMALL DELTA ++1D700 ; mapped ; 03B5 # 3.1 MATHEMATICAL ITALIC SMALL EPSILON ++1D701 ; mapped ; 03B6 # 3.1 MATHEMATICAL ITALIC SMALL ZETA ++1D702 ; mapped ; 03B7 # 3.1 MATHEMATICAL ITALIC SMALL ETA ++1D703 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC SMALL THETA ++1D704 ; mapped ; 03B9 # 3.1 MATHEMATICAL ITALIC SMALL IOTA ++1D705 ; mapped ; 03BA # 3.1 MATHEMATICAL ITALIC SMALL KAPPA ++1D706 ; mapped ; 03BB # 3.1 MATHEMATICAL ITALIC SMALL LAMDA ++1D707 ; mapped ; 03BC # 3.1 MATHEMATICAL ITALIC SMALL MU ++1D708 ; mapped ; 03BD # 3.1 MATHEMATICAL ITALIC SMALL NU ++1D709 ; mapped ; 03BE # 3.1 MATHEMATICAL ITALIC SMALL XI ++1D70A ; 
mapped ; 03BF # 3.1 MATHEMATICAL ITALIC SMALL OMICRON ++1D70B ; mapped ; 03C0 # 3.1 MATHEMATICAL ITALIC SMALL PI ++1D70C ; mapped ; 03C1 # 3.1 MATHEMATICAL ITALIC SMALL RHO ++1D70D..1D70E ; mapped ; 03C3 # 3.1 MATHEMATICAL ITALIC SMALL FINAL SIGMA..MATHEMATICAL ITALIC SMALL SIGMA ++1D70F ; mapped ; 03C4 # 3.1 MATHEMATICAL ITALIC SMALL TAU ++1D710 ; mapped ; 03C5 # 3.1 MATHEMATICAL ITALIC SMALL UPSILON ++1D711 ; mapped ; 03C6 # 3.1 MATHEMATICAL ITALIC SMALL PHI ++1D712 ; mapped ; 03C7 # 3.1 MATHEMATICAL ITALIC SMALL CHI ++1D713 ; mapped ; 03C8 # 3.1 MATHEMATICAL ITALIC SMALL PSI ++1D714 ; mapped ; 03C9 # 3.1 MATHEMATICAL ITALIC SMALL OMEGA ++1D715 ; mapped ; 2202 # 3.1 MATHEMATICAL ITALIC PARTIAL DIFFERENTIAL ++1D716 ; mapped ; 03B5 # 3.1 MATHEMATICAL ITALIC EPSILON SYMBOL ++1D717 ; mapped ; 03B8 # 3.1 MATHEMATICAL ITALIC THETA SYMBOL ++1D718 ; mapped ; 03BA # 3.1 MATHEMATICAL ITALIC KAPPA SYMBOL ++1D719 ; mapped ; 03C6 # 3.1 MATHEMATICAL ITALIC PHI SYMBOL ++1D71A ; mapped ; 03C1 # 3.1 MATHEMATICAL ITALIC RHO SYMBOL ++1D71B ; mapped ; 03C0 # 3.1 MATHEMATICAL ITALIC PI SYMBOL ++1D71C ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL ALPHA ++1D71D ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL BETA ++1D71E ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL GAMMA ++1D71F ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL DELTA ++1D720 ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL EPSILON ++1D721 ; mapped ; 03B6 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL ZETA ++1D722 ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL ETA ++1D723 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL THETA ++1D724 ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL IOTA ++1D725 ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL KAPPA ++1D726 ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL LAMDA ++1D727 ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL MU ++1D728 ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL NU ++1D729 ; mapped ; 
03BE # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL XI ++1D72A ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL OMICRON ++1D72B ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL PI ++1D72C ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL RHO ++1D72D ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL THETA SYMBOL ++1D72E ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL SIGMA ++1D72F ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL TAU ++1D730 ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL UPSILON ++1D731 ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL PHI ++1D732 ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL CHI ++1D733 ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL PSI ++1D734 ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD ITALIC CAPITAL OMEGA ++1D735 ; mapped ; 2207 # 3.1 MATHEMATICAL BOLD ITALIC NABLA ++1D736 ; mapped ; 03B1 # 3.1 MATHEMATICAL BOLD ITALIC SMALL ALPHA ++1D737 ; mapped ; 03B2 # 3.1 MATHEMATICAL BOLD ITALIC SMALL BETA ++1D738 ; mapped ; 03B3 # 3.1 MATHEMATICAL BOLD ITALIC SMALL GAMMA ++1D739 ; mapped ; 03B4 # 3.1 MATHEMATICAL BOLD ITALIC SMALL DELTA ++1D73A ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD ITALIC SMALL EPSILON ++1D73B ; mapped ; 03B6 # 3.1 MATHEMATICAL BOLD ITALIC SMALL ZETA ++1D73C ; mapped ; 03B7 # 3.1 MATHEMATICAL BOLD ITALIC SMALL ETA ++1D73D ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC SMALL THETA ++1D73E ; mapped ; 03B9 # 3.1 MATHEMATICAL BOLD ITALIC SMALL IOTA ++1D73F ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD ITALIC SMALL KAPPA ++1D740 ; mapped ; 03BB # 3.1 MATHEMATICAL BOLD ITALIC SMALL LAMDA ++1D741 ; mapped ; 03BC # 3.1 MATHEMATICAL BOLD ITALIC SMALL MU ++1D742 ; mapped ; 03BD # 3.1 MATHEMATICAL BOLD ITALIC SMALL NU ++1D743 ; mapped ; 03BE # 3.1 MATHEMATICAL BOLD ITALIC SMALL XI ++1D744 ; mapped ; 03BF # 3.1 MATHEMATICAL BOLD ITALIC SMALL OMICRON ++1D745 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD ITALIC SMALL PI ++1D746 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD ITALIC SMALL RHO 
++1D747..1D748 ; mapped ; 03C3 # 3.1 MATHEMATICAL BOLD ITALIC SMALL FINAL SIGMA..MATHEMATICAL BOLD ITALIC SMALL SIGMA ++1D749 ; mapped ; 03C4 # 3.1 MATHEMATICAL BOLD ITALIC SMALL TAU ++1D74A ; mapped ; 03C5 # 3.1 MATHEMATICAL BOLD ITALIC SMALL UPSILON ++1D74B ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD ITALIC SMALL PHI ++1D74C ; mapped ; 03C7 # 3.1 MATHEMATICAL BOLD ITALIC SMALL CHI ++1D74D ; mapped ; 03C8 # 3.1 MATHEMATICAL BOLD ITALIC SMALL PSI ++1D74E ; mapped ; 03C9 # 3.1 MATHEMATICAL BOLD ITALIC SMALL OMEGA ++1D74F ; mapped ; 2202 # 3.1 MATHEMATICAL BOLD ITALIC PARTIAL DIFFERENTIAL ++1D750 ; mapped ; 03B5 # 3.1 MATHEMATICAL BOLD ITALIC EPSILON SYMBOL ++1D751 ; mapped ; 03B8 # 3.1 MATHEMATICAL BOLD ITALIC THETA SYMBOL ++1D752 ; mapped ; 03BA # 3.1 MATHEMATICAL BOLD ITALIC KAPPA SYMBOL ++1D753 ; mapped ; 03C6 # 3.1 MATHEMATICAL BOLD ITALIC PHI SYMBOL ++1D754 ; mapped ; 03C1 # 3.1 MATHEMATICAL BOLD ITALIC RHO SYMBOL ++1D755 ; mapped ; 03C0 # 3.1 MATHEMATICAL BOLD ITALIC PI SYMBOL ++1D756 ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL ALPHA ++1D757 ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL BETA ++1D758 ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL GAMMA ++1D759 ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL DELTA ++1D75A ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL EPSILON ++1D75B ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL ZETA ++1D75C ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL ETA ++1D75D ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL THETA ++1D75E ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL IOTA ++1D75F ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL KAPPA ++1D760 ; mapped ; 03BB # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL LAMDA ++1D761 ; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL MU ++1D762 ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL NU ++1D763 ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL 
XI ++1D764 ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL OMICRON ++1D765 ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL PI ++1D766 ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL RHO ++1D767 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL THETA SYMBOL ++1D768 ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL SIGMA ++1D769 ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL TAU ++1D76A ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL UPSILON ++1D76B ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL PHI ++1D76C ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL CHI ++1D76D ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL PSI ++1D76E ; mapped ; 03C9 # 3.1 MATHEMATICAL SANS-SERIF BOLD CAPITAL OMEGA ++1D76F ; mapped ; 2207 # 3.1 MATHEMATICAL SANS-SERIF BOLD NABLA ++1D770 ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL ALPHA ++1D771 ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL BETA ++1D772 ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL GAMMA ++1D773 ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL DELTA ++1D774 ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL EPSILON ++1D775 ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL ZETA ++1D776 ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL ETA ++1D777 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL THETA ++1D778 ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL IOTA ++1D779 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL KAPPA ++1D77A ; mapped ; 03BB # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL LAMDA ++1D77B ; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL MU ++1D77C ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL NU ++1D77D ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL XI ++1D77E ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL OMICRON ++1D77F ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL PI ++1D780 
; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL RHO ++1D781..1D782 ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL FINAL SIGMA..MATHEMATICAL SANS-SERIF BOLD SMALL SIGMA ++1D783 ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL TAU ++1D784 ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL UPSILON ++1D785 ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL PHI ++1D786 ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL CHI ++1D787 ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL PSI ++1D788 ; mapped ; 03C9 # 3.1 MATHEMATICAL SANS-SERIF BOLD SMALL OMEGA ++1D789 ; mapped ; 2202 # 3.1 MATHEMATICAL SANS-SERIF BOLD PARTIAL DIFFERENTIAL ++1D78A ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD EPSILON SYMBOL ++1D78B ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD THETA SYMBOL ++1D78C ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD KAPPA SYMBOL ++1D78D ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD PHI SYMBOL ++1D78E ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD RHO SYMBOL ++1D78F ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD PI SYMBOL ++1D790 ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ALPHA ++1D791 ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL BETA ++1D792 ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL GAMMA ++1D793 ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL DELTA ++1D794 ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL EPSILON ++1D795 ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ZETA ++1D796 ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL ETA ++1D797 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL THETA ++1D798 ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL IOTA ++1D799 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL KAPPA ++1D79A ; mapped ; 03BB # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL LAMDA ++1D79B 
; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL MU ++1D79C ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL NU ++1D79D ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL XI ++1D79E ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMICRON ++1D79F ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL PI ++1D7A0 ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL RHO ++1D7A1 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL THETA SYMBOL ++1D7A2 ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL SIGMA ++1D7A3 ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL TAU ++1D7A4 ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL UPSILON ++1D7A5 ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL PHI ++1D7A6 ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL CHI ++1D7A7 ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL PSI ++1D7A8 ; mapped ; 03C9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC CAPITAL OMEGA ++1D7A9 ; mapped ; 2207 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC NABLA ++1D7AA ; mapped ; 03B1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ALPHA ++1D7AB ; mapped ; 03B2 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL BETA ++1D7AC ; mapped ; 03B3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL GAMMA ++1D7AD ; mapped ; 03B4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL DELTA ++1D7AE ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL EPSILON ++1D7AF ; mapped ; 03B6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ZETA ++1D7B0 ; mapped ; 03B7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL ETA ++1D7B1 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL THETA ++1D7B2 ; mapped ; 03B9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL IOTA ++1D7B3 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL KAPPA ++1D7B4 ; mapped ; 03BB # 3.1 MATHEMATICAL 
SANS-SERIF BOLD ITALIC SMALL LAMDA ++1D7B5 ; mapped ; 03BC # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL MU ++1D7B6 ; mapped ; 03BD # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL NU ++1D7B7 ; mapped ; 03BE # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL XI ++1D7B8 ; mapped ; 03BF # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMICRON ++1D7B9 ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PI ++1D7BA ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL RHO ++1D7BB..1D7BC ; mapped ; 03C3 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL FINAL SIGMA..MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL SIGMA ++1D7BD ; mapped ; 03C4 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL TAU ++1D7BE ; mapped ; 03C5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL UPSILON ++1D7BF ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PHI ++1D7C0 ; mapped ; 03C7 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL CHI ++1D7C1 ; mapped ; 03C8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL PSI ++1D7C2 ; mapped ; 03C9 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC SMALL OMEGA ++1D7C3 ; mapped ; 2202 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC PARTIAL DIFFERENTIAL ++1D7C4 ; mapped ; 03B5 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC EPSILON SYMBOL ++1D7C5 ; mapped ; 03B8 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC THETA SYMBOL ++1D7C6 ; mapped ; 03BA # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC KAPPA SYMBOL ++1D7C7 ; mapped ; 03C6 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC PHI SYMBOL ++1D7C8 ; mapped ; 03C1 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC RHO SYMBOL ++1D7C9 ; mapped ; 03C0 # 3.1 MATHEMATICAL SANS-SERIF BOLD ITALIC PI SYMBOL ++1D7CA..1D7CB ; mapped ; 03DD # 5.0 MATHEMATICAL BOLD CAPITAL DIGAMMA..MATHEMATICAL BOLD SMALL DIGAMMA ++1D7CC..1D7CD ; disallowed # NA .. 
++1D7CE ; mapped ; 0030 # 3.1 MATHEMATICAL BOLD DIGIT ZERO ++1D7CF ; mapped ; 0031 # 3.1 MATHEMATICAL BOLD DIGIT ONE ++1D7D0 ; mapped ; 0032 # 3.1 MATHEMATICAL BOLD DIGIT TWO ++1D7D1 ; mapped ; 0033 # 3.1 MATHEMATICAL BOLD DIGIT THREE ++1D7D2 ; mapped ; 0034 # 3.1 MATHEMATICAL BOLD DIGIT FOUR ++1D7D3 ; mapped ; 0035 # 3.1 MATHEMATICAL BOLD DIGIT FIVE ++1D7D4 ; mapped ; 0036 # 3.1 MATHEMATICAL BOLD DIGIT SIX ++1D7D5 ; mapped ; 0037 # 3.1 MATHEMATICAL BOLD DIGIT SEVEN ++1D7D6 ; mapped ; 0038 # 3.1 MATHEMATICAL BOLD DIGIT EIGHT ++1D7D7 ; mapped ; 0039 # 3.1 MATHEMATICAL BOLD DIGIT NINE ++1D7D8 ; mapped ; 0030 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT ZERO ++1D7D9 ; mapped ; 0031 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT ONE ++1D7DA ; mapped ; 0032 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT TWO ++1D7DB ; mapped ; 0033 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT THREE ++1D7DC ; mapped ; 0034 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT FOUR ++1D7DD ; mapped ; 0035 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT FIVE ++1D7DE ; mapped ; 0036 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT SIX ++1D7DF ; mapped ; 0037 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT SEVEN ++1D7E0 ; mapped ; 0038 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT EIGHT ++1D7E1 ; mapped ; 0039 # 3.1 MATHEMATICAL DOUBLE-STRUCK DIGIT NINE ++1D7E2 ; mapped ; 0030 # 3.1 MATHEMATICAL SANS-SERIF DIGIT ZERO ++1D7E3 ; mapped ; 0031 # 3.1 MATHEMATICAL SANS-SERIF DIGIT ONE ++1D7E4 ; mapped ; 0032 # 3.1 MATHEMATICAL SANS-SERIF DIGIT TWO ++1D7E5 ; mapped ; 0033 # 3.1 MATHEMATICAL SANS-SERIF DIGIT THREE ++1D7E6 ; mapped ; 0034 # 3.1 MATHEMATICAL SANS-SERIF DIGIT FOUR ++1D7E7 ; mapped ; 0035 # 3.1 MATHEMATICAL SANS-SERIF DIGIT FIVE ++1D7E8 ; mapped ; 0036 # 3.1 MATHEMATICAL SANS-SERIF DIGIT SIX ++1D7E9 ; mapped ; 0037 # 3.1 MATHEMATICAL SANS-SERIF DIGIT SEVEN ++1D7EA ; mapped ; 0038 # 3.1 MATHEMATICAL SANS-SERIF DIGIT EIGHT ++1D7EB ; mapped ; 0039 # 3.1 MATHEMATICAL SANS-SERIF DIGIT NINE ++1D7EC ; mapped ; 0030 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT ZERO ++1D7ED ; 
mapped ; 0031 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT ONE ++1D7EE ; mapped ; 0032 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT TWO ++1D7EF ; mapped ; 0033 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT THREE ++1D7F0 ; mapped ; 0034 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT FOUR ++1D7F1 ; mapped ; 0035 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT FIVE ++1D7F2 ; mapped ; 0036 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT SIX ++1D7F3 ; mapped ; 0037 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT SEVEN ++1D7F4 ; mapped ; 0038 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT EIGHT ++1D7F5 ; mapped ; 0039 # 3.1 MATHEMATICAL SANS-SERIF BOLD DIGIT NINE ++1D7F6 ; mapped ; 0030 # 3.1 MATHEMATICAL MONOSPACE DIGIT ZERO ++1D7F7 ; mapped ; 0031 # 3.1 MATHEMATICAL MONOSPACE DIGIT ONE ++1D7F8 ; mapped ; 0032 # 3.1 MATHEMATICAL MONOSPACE DIGIT TWO ++1D7F9 ; mapped ; 0033 # 3.1 MATHEMATICAL MONOSPACE DIGIT THREE ++1D7FA ; mapped ; 0034 # 3.1 MATHEMATICAL MONOSPACE DIGIT FOUR ++1D7FB ; mapped ; 0035 # 3.1 MATHEMATICAL MONOSPACE DIGIT FIVE ++1D7FC ; mapped ; 0036 # 3.1 MATHEMATICAL MONOSPACE DIGIT SIX ++1D7FD ; mapped ; 0037 # 3.1 MATHEMATICAL MONOSPACE DIGIT SEVEN ++1D7FE ; mapped ; 0038 # 3.1 MATHEMATICAL MONOSPACE DIGIT EIGHT ++1D7FF ; mapped ; 0039 # 3.1 MATHEMATICAL MONOSPACE DIGIT NINE ++1D800..1D9FF ; valid ; ; NV8 # 8.0 SIGNWRITING HAND-FIST INDEX..SIGNWRITING HEAD ++1DA00..1DA36 ; valid # 8.0 SIGNWRITING HEAD RIM..SIGNWRITING AIR SUCKING IN ++1DA37..1DA3A ; valid ; ; NV8 # 8.0 SIGNWRITING AIR BLOW SMALL ROTATIONS..SIGNWRITING BREATH EXHALE ++1DA3B..1DA6C ; valid # 8.0 SIGNWRITING MOUTH CLOSED NEUTRAL..SIGNWRITING EXCITEMENT ++1DA6D..1DA74 ; valid ; ; NV8 # 8.0 SIGNWRITING SHOULDER HIP SPINE..SIGNWRITING TORSO-FLOORPLANE TWISTING ++1DA75 ; valid # 8.0 SIGNWRITING UPPER BODY TILTING FROM HIP JOINTS ++1DA76..1DA83 ; valid ; ; NV8 # 8.0 SIGNWRITING LIMB COMBINATION..SIGNWRITING LOCATION DEPTH ++1DA84 ; valid # 8.0 SIGNWRITING LOCATION HEAD NECK ++1DA85..1DA8B ; valid ; ; NV8 # 8.0 SIGNWRITING LOCATION 
TORSO..SIGNWRITING PARENTHESIS ++1DA8C..1DA9A ; disallowed # NA .. ++1DA9B..1DA9F ; valid # 8.0 SIGNWRITING FILL MODIFIER-2..SIGNWRITING FILL MODIFIER-6 ++1DAA0 ; disallowed # NA ++1DAA1..1DAAF ; valid # 8.0 SIGNWRITING ROTATION MODIFIER-2..SIGNWRITING ROTATION MODIFIER-16 ++1DAB0..1DFFF ; disallowed # NA .. ++1E000..1E006 ; valid # 9.0 COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE ++1E007 ; disallowed # NA ++1E008..1E018 ; valid # 9.0 COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU ++1E019..1E01A ; disallowed # NA .. ++1E01B..1E021 ; valid # 9.0 COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI ++1E022 ; disallowed # NA ++1E023..1E024 ; valid # 9.0 COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS ++1E025 ; disallowed # NA ++1E026..1E02A ; valid # 9.0 COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA ++1E02B..1E7FF ; disallowed # NA .. ++1E800..1E8C4 ; valid # 7.0 MENDE KIKAKUI SYLLABLE M001 KI..MENDE KIKAKUI SYLLABLE M060 NYON ++1E8C5..1E8C6 ; disallowed # NA .. ++1E8C7..1E8CF ; valid ; ; NV8 # 7.0 MENDE KIKAKUI DIGIT ONE..MENDE KIKAKUI DIGIT NINE ++1E8D0..1E8D6 ; valid # 7.0 MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS ++1E8D7..1E8FF ; disallowed # NA .. 
++1E900 ; mapped ; 1E922 # 9.0 ADLAM CAPITAL LETTER ALIF ++1E901 ; mapped ; 1E923 # 9.0 ADLAM CAPITAL LETTER DAALI ++1E902 ; mapped ; 1E924 # 9.0 ADLAM CAPITAL LETTER LAAM ++1E903 ; mapped ; 1E925 # 9.0 ADLAM CAPITAL LETTER MIIM ++1E904 ; mapped ; 1E926 # 9.0 ADLAM CAPITAL LETTER BA ++1E905 ; mapped ; 1E927 # 9.0 ADLAM CAPITAL LETTER SINNYIIYHE ++1E906 ; mapped ; 1E928 # 9.0 ADLAM CAPITAL LETTER PE ++1E907 ; mapped ; 1E929 # 9.0 ADLAM CAPITAL LETTER BHE ++1E908 ; mapped ; 1E92A # 9.0 ADLAM CAPITAL LETTER RA ++1E909 ; mapped ; 1E92B # 9.0 ADLAM CAPITAL LETTER E ++1E90A ; mapped ; 1E92C # 9.0 ADLAM CAPITAL LETTER FA ++1E90B ; mapped ; 1E92D # 9.0 ADLAM CAPITAL LETTER I ++1E90C ; mapped ; 1E92E # 9.0 ADLAM CAPITAL LETTER O ++1E90D ; mapped ; 1E92F # 9.0 ADLAM CAPITAL LETTER DHA ++1E90E ; mapped ; 1E930 # 9.0 ADLAM CAPITAL LETTER YHE ++1E90F ; mapped ; 1E931 # 9.0 ADLAM CAPITAL LETTER WAW ++1E910 ; mapped ; 1E932 # 9.0 ADLAM CAPITAL LETTER NUN ++1E911 ; mapped ; 1E933 # 9.0 ADLAM CAPITAL LETTER KAF ++1E912 ; mapped ; 1E934 # 9.0 ADLAM CAPITAL LETTER YA ++1E913 ; mapped ; 1E935 # 9.0 ADLAM CAPITAL LETTER U ++1E914 ; mapped ; 1E936 # 9.0 ADLAM CAPITAL LETTER JIIM ++1E915 ; mapped ; 1E937 # 9.0 ADLAM CAPITAL LETTER CHI ++1E916 ; mapped ; 1E938 # 9.0 ADLAM CAPITAL LETTER HA ++1E917 ; mapped ; 1E939 # 9.0 ADLAM CAPITAL LETTER QAAF ++1E918 ; mapped ; 1E93A # 9.0 ADLAM CAPITAL LETTER GA ++1E919 ; mapped ; 1E93B # 9.0 ADLAM CAPITAL LETTER NYA ++1E91A ; mapped ; 1E93C # 9.0 ADLAM CAPITAL LETTER TU ++1E91B ; mapped ; 1E93D # 9.0 ADLAM CAPITAL LETTER NHA ++1E91C ; mapped ; 1E93E # 9.0 ADLAM CAPITAL LETTER VA ++1E91D ; mapped ; 1E93F # 9.0 ADLAM CAPITAL LETTER KHA ++1E91E ; mapped ; 1E940 # 9.0 ADLAM CAPITAL LETTER GBE ++1E91F ; mapped ; 1E941 # 9.0 ADLAM CAPITAL LETTER ZAL ++1E920 ; mapped ; 1E942 # 9.0 ADLAM CAPITAL LETTER KPO ++1E921 ; mapped ; 1E943 # 9.0 ADLAM CAPITAL LETTER SHA ++1E922..1E94A ; valid # 9.0 ADLAM SMALL LETTER ALIF..ADLAM NUKTA ++1E94B..1E94F ; disallowed # NA 
.. ++1E950..1E959 ; valid # 9.0 ADLAM DIGIT ZERO..ADLAM DIGIT NINE ++1E95A..1E95D ; disallowed # NA .. ++1E95E..1E95F ; valid ; ; NV8 # 9.0 ADLAM INITIAL EXCLAMATION MARK..ADLAM INITIAL QUESTION MARK ++1E960..1EDFF ; disallowed # NA .. ++1EE00 ; mapped ; 0627 # 6.1 ARABIC MATHEMATICAL ALEF ++1EE01 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL BEH ++1EE02 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL JEEM ++1EE03 ; mapped ; 062F # 6.1 ARABIC MATHEMATICAL DAL ++1EE04 ; disallowed # NA ++1EE05 ; mapped ; 0648 # 6.1 ARABIC MATHEMATICAL WAW ++1EE06 ; mapped ; 0632 # 6.1 ARABIC MATHEMATICAL ZAIN ++1EE07 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL HAH ++1EE08 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL TAH ++1EE09 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL YEH ++1EE0A ; mapped ; 0643 # 6.1 ARABIC MATHEMATICAL KAF ++1EE0B ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL LAM ++1EE0C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL MEEM ++1EE0D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL NOON ++1EE0E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL SEEN ++1EE0F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL AIN ++1EE10 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL FEH ++1EE11 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL SAD ++1EE12 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL QAF ++1EE13 ; mapped ; 0631 # 6.1 ARABIC MATHEMATICAL REH ++1EE14 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL SHEEN ++1EE15 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL TEH ++1EE16 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL THEH ++1EE17 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL KHAH ++1EE18 ; mapped ; 0630 # 6.1 ARABIC MATHEMATICAL THAL ++1EE19 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL DAD ++1EE1A ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL ZAH ++1EE1B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL GHAIN ++1EE1C ; mapped ; 066E # 6.1 ARABIC MATHEMATICAL DOTLESS BEH ++1EE1D ; mapped ; 06BA # 6.1 ARABIC MATHEMATICAL DOTLESS NOON ++1EE1E ; mapped ; 06A1 # 6.1 ARABIC MATHEMATICAL DOTLESS FEH ++1EE1F ; mapped ; 066F # 6.1 ARABIC MATHEMATICAL DOTLESS QAF ++1EE20 ; 
disallowed # NA ++1EE21 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL INITIAL BEH ++1EE22 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL INITIAL JEEM ++1EE23 ; disallowed # NA ++1EE24 ; mapped ; 0647 # 6.1 ARABIC MATHEMATICAL INITIAL HEH ++1EE25..1EE26 ; disallowed # NA .. ++1EE27 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL INITIAL HAH ++1EE28 ; disallowed # NA ++1EE29 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL INITIAL YEH ++1EE2A ; mapped ; 0643 # 6.1 ARABIC MATHEMATICAL INITIAL KAF ++1EE2B ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL INITIAL LAM ++1EE2C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL INITIAL MEEM ++1EE2D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL INITIAL NOON ++1EE2E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL INITIAL SEEN ++1EE2F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL INITIAL AIN ++1EE30 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL INITIAL FEH ++1EE31 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL INITIAL SAD ++1EE32 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL INITIAL QAF ++1EE33 ; disallowed # NA ++1EE34 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL INITIAL SHEEN ++1EE35 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL INITIAL TEH ++1EE36 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL INITIAL THEH ++1EE37 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL INITIAL KHAH ++1EE38 ; disallowed # NA ++1EE39 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL INITIAL DAD ++1EE3A ; disallowed # NA ++1EE3B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL INITIAL GHAIN ++1EE3C..1EE41 ; disallowed # NA .. ++1EE42 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL TAILED JEEM ++1EE43..1EE46 ; disallowed # NA .. 
++1EE47 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL TAILED HAH ++1EE48 ; disallowed # NA ++1EE49 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL TAILED YEH ++1EE4A ; disallowed # NA ++1EE4B ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL TAILED LAM ++1EE4C ; disallowed # NA ++1EE4D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL TAILED NOON ++1EE4E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL TAILED SEEN ++1EE4F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL TAILED AIN ++1EE50 ; disallowed # NA ++1EE51 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL TAILED SAD ++1EE52 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL TAILED QAF ++1EE53 ; disallowed # NA ++1EE54 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL TAILED SHEEN ++1EE55..1EE56 ; disallowed # NA .. ++1EE57 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL TAILED KHAH ++1EE58 ; disallowed # NA ++1EE59 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL TAILED DAD ++1EE5A ; disallowed # NA ++1EE5B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL TAILED GHAIN ++1EE5C ; disallowed # NA ++1EE5D ; mapped ; 06BA # 6.1 ARABIC MATHEMATICAL TAILED DOTLESS NOON ++1EE5E ; disallowed # NA ++1EE5F ; mapped ; 066F # 6.1 ARABIC MATHEMATICAL TAILED DOTLESS QAF ++1EE60 ; disallowed # NA ++1EE61 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL STRETCHED BEH ++1EE62 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL STRETCHED JEEM ++1EE63 ; disallowed # NA ++1EE64 ; mapped ; 0647 # 6.1 ARABIC MATHEMATICAL STRETCHED HEH ++1EE65..1EE66 ; disallowed # NA .. 
++1EE67 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL STRETCHED HAH ++1EE68 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL STRETCHED TAH ++1EE69 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL STRETCHED YEH ++1EE6A ; mapped ; 0643 # 6.1 ARABIC MATHEMATICAL STRETCHED KAF ++1EE6B ; disallowed # NA ++1EE6C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL STRETCHED MEEM ++1EE6D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL STRETCHED NOON ++1EE6E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL STRETCHED SEEN ++1EE6F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL STRETCHED AIN ++1EE70 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL STRETCHED FEH ++1EE71 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL STRETCHED SAD ++1EE72 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL STRETCHED QAF ++1EE73 ; disallowed # NA ++1EE74 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL STRETCHED SHEEN ++1EE75 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL STRETCHED TEH ++1EE76 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL STRETCHED THEH ++1EE77 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL STRETCHED KHAH ++1EE78 ; disallowed # NA ++1EE79 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL STRETCHED DAD ++1EE7A ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL STRETCHED ZAH ++1EE7B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL STRETCHED GHAIN ++1EE7C ; mapped ; 066E # 6.1 ARABIC MATHEMATICAL STRETCHED DOTLESS BEH ++1EE7D ; disallowed # NA ++1EE7E ; mapped ; 06A1 # 6.1 ARABIC MATHEMATICAL STRETCHED DOTLESS FEH ++1EE7F ; disallowed # NA ++1EE80 ; mapped ; 0627 # 6.1 ARABIC MATHEMATICAL LOOPED ALEF ++1EE81 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL LOOPED BEH ++1EE82 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL LOOPED JEEM ++1EE83 ; mapped ; 062F # 6.1 ARABIC MATHEMATICAL LOOPED DAL ++1EE84 ; mapped ; 0647 # 6.1 ARABIC MATHEMATICAL LOOPED HEH ++1EE85 ; mapped ; 0648 # 6.1 ARABIC MATHEMATICAL LOOPED WAW ++1EE86 ; mapped ; 0632 # 6.1 ARABIC MATHEMATICAL LOOPED ZAIN ++1EE87 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL LOOPED HAH ++1EE88 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL LOOPED TAH ++1EE89 ; 
mapped ; 064A # 6.1 ARABIC MATHEMATICAL LOOPED YEH ++1EE8A ; disallowed # NA ++1EE8B ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL LOOPED LAM ++1EE8C ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL LOOPED MEEM ++1EE8D ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL LOOPED NOON ++1EE8E ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL LOOPED SEEN ++1EE8F ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL LOOPED AIN ++1EE90 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL LOOPED FEH ++1EE91 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL LOOPED SAD ++1EE92 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL LOOPED QAF ++1EE93 ; mapped ; 0631 # 6.1 ARABIC MATHEMATICAL LOOPED REH ++1EE94 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL LOOPED SHEEN ++1EE95 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL LOOPED TEH ++1EE96 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL LOOPED THEH ++1EE97 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL LOOPED KHAH ++1EE98 ; mapped ; 0630 # 6.1 ARABIC MATHEMATICAL LOOPED THAL ++1EE99 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL LOOPED DAD ++1EE9A ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL LOOPED ZAH ++1EE9B ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL LOOPED GHAIN ++1EE9C..1EEA0 ; disallowed # NA .. 
++1EEA1 ; mapped ; 0628 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK BEH ++1EEA2 ; mapped ; 062C # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK JEEM ++1EEA3 ; mapped ; 062F # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK DAL ++1EEA4 ; disallowed # NA ++1EEA5 ; mapped ; 0648 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK WAW ++1EEA6 ; mapped ; 0632 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK ZAIN ++1EEA7 ; mapped ; 062D # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK HAH ++1EEA8 ; mapped ; 0637 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK TAH ++1EEA9 ; mapped ; 064A # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK YEH ++1EEAA ; disallowed # NA ++1EEAB ; mapped ; 0644 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK LAM ++1EEAC ; mapped ; 0645 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK MEEM ++1EEAD ; mapped ; 0646 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK NOON ++1EEAE ; mapped ; 0633 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK SEEN ++1EEAF ; mapped ; 0639 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK AIN ++1EEB0 ; mapped ; 0641 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK FEH ++1EEB1 ; mapped ; 0635 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK SAD ++1EEB2 ; mapped ; 0642 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK QAF ++1EEB3 ; mapped ; 0631 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK REH ++1EEB4 ; mapped ; 0634 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK SHEEN ++1EEB5 ; mapped ; 062A # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK TEH ++1EEB6 ; mapped ; 062B # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK THEH ++1EEB7 ; mapped ; 062E # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK KHAH ++1EEB8 ; mapped ; 0630 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK THAL ++1EEB9 ; mapped ; 0636 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK DAD ++1EEBA ; mapped ; 0638 # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK ZAH ++1EEBB ; mapped ; 063A # 6.1 ARABIC MATHEMATICAL DOUBLE-STRUCK GHAIN ++1EEBC..1EEEF ; disallowed # NA .. ++1EEF0..1EEF1 ; valid ; ; NV8 # 6.1 ARABIC MATHEMATICAL OPERATOR MEEM WITH HAH WITH TATWEEL..ARABIC MATHEMATICAL OPERATOR HAH WITH DAL ++1EEF2..1EFFF ; disallowed # NA .. 
++1F000..1F02B ; valid ; ; NV8 # 5.1 MAHJONG TILE EAST WIND..MAHJONG TILE BACK ++1F02C..1F02F ; disallowed # NA .. ++1F030..1F093 ; valid ; ; NV8 # 5.1 DOMINO TILE HORIZONTAL BACK..DOMINO TILE VERTICAL-06-06 ++1F094..1F09F ; disallowed # NA .. ++1F0A0..1F0AE ; valid ; ; NV8 # 6.0 PLAYING CARD BACK..PLAYING CARD KING OF SPADES ++1F0AF..1F0B0 ; disallowed # NA .. ++1F0B1..1F0BE ; valid ; ; NV8 # 6.0 PLAYING CARD ACE OF HEARTS..PLAYING CARD KING OF HEARTS ++1F0BF ; valid ; ; NV8 # 7.0 PLAYING CARD RED JOKER ++1F0C0 ; disallowed # NA ++1F0C1..1F0CF ; valid ; ; NV8 # 6.0 PLAYING CARD ACE OF DIAMONDS..PLAYING CARD BLACK JOKER ++1F0D0 ; disallowed # NA ++1F0D1..1F0DF ; valid ; ; NV8 # 6.0 PLAYING CARD ACE OF CLUBS..PLAYING CARD WHITE JOKER ++1F0E0..1F0F5 ; valid ; ; NV8 # 7.0 PLAYING CARD FOOL..PLAYING CARD TRUMP-21 ++1F0F6..1F0FF ; disallowed # NA .. ++1F100 ; disallowed # 5.2 DIGIT ZERO FULL STOP ++1F101 ; disallowed_STD3_mapped ; 0030 002C # 5.2 DIGIT ZERO COMMA ++1F102 ; disallowed_STD3_mapped ; 0031 002C # 5.2 DIGIT ONE COMMA ++1F103 ; disallowed_STD3_mapped ; 0032 002C # 5.2 DIGIT TWO COMMA ++1F104 ; disallowed_STD3_mapped ; 0033 002C # 5.2 DIGIT THREE COMMA ++1F105 ; disallowed_STD3_mapped ; 0034 002C # 5.2 DIGIT FOUR COMMA ++1F106 ; disallowed_STD3_mapped ; 0035 002C # 5.2 DIGIT FIVE COMMA ++1F107 ; disallowed_STD3_mapped ; 0036 002C # 5.2 DIGIT SIX COMMA ++1F108 ; disallowed_STD3_mapped ; 0037 002C # 5.2 DIGIT SEVEN COMMA ++1F109 ; disallowed_STD3_mapped ; 0038 002C # 5.2 DIGIT EIGHT COMMA ++1F10A ; disallowed_STD3_mapped ; 0039 002C # 5.2 DIGIT NINE COMMA ++1F10B..1F10C ; valid ; ; NV8 # 7.0 DINGBAT CIRCLED SANS-SERIF DIGIT ZERO..DINGBAT NEGATIVE CIRCLED SANS-SERIF DIGIT ZERO ++1F10D..1F10F ; disallowed # NA .. 
++1F110 ; disallowed_STD3_mapped ; 0028 0061 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER A ++1F111 ; disallowed_STD3_mapped ; 0028 0062 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER B ++1F112 ; disallowed_STD3_mapped ; 0028 0063 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER C ++1F113 ; disallowed_STD3_mapped ; 0028 0064 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER D ++1F114 ; disallowed_STD3_mapped ; 0028 0065 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER E ++1F115 ; disallowed_STD3_mapped ; 0028 0066 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER F ++1F116 ; disallowed_STD3_mapped ; 0028 0067 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER G ++1F117 ; disallowed_STD3_mapped ; 0028 0068 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER H ++1F118 ; disallowed_STD3_mapped ; 0028 0069 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER I ++1F119 ; disallowed_STD3_mapped ; 0028 006A 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER J ++1F11A ; disallowed_STD3_mapped ; 0028 006B 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER K ++1F11B ; disallowed_STD3_mapped ; 0028 006C 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER L ++1F11C ; disallowed_STD3_mapped ; 0028 006D 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER M ++1F11D ; disallowed_STD3_mapped ; 0028 006E 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER N ++1F11E ; disallowed_STD3_mapped ; 0028 006F 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER O ++1F11F ; disallowed_STD3_mapped ; 0028 0070 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER P ++1F120 ; disallowed_STD3_mapped ; 0028 0071 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER Q ++1F121 ; disallowed_STD3_mapped ; 0028 0072 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER R ++1F122 ; disallowed_STD3_mapped ; 0028 0073 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER S ++1F123 ; disallowed_STD3_mapped ; 0028 0074 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER T ++1F124 ; disallowed_STD3_mapped ; 0028 0075 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER U ++1F125 ; disallowed_STD3_mapped ; 0028 0076 0029 #5.2 
PARENTHESIZED LATIN CAPITAL LETTER V ++1F126 ; disallowed_STD3_mapped ; 0028 0077 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER W ++1F127 ; disallowed_STD3_mapped ; 0028 0078 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER X ++1F128 ; disallowed_STD3_mapped ; 0028 0079 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER Y ++1F129 ; disallowed_STD3_mapped ; 0028 007A 0029 #5.2 PARENTHESIZED LATIN CAPITAL LETTER Z ++1F12A ; mapped ; 3014 0073 3015 #5.2 TORTOISE SHELL BRACKETED LATIN CAPITAL LETTER S ++1F12B ; mapped ; 0063 # 5.2 CIRCLED ITALIC LATIN CAPITAL LETTER C ++1F12C ; mapped ; 0072 # 5.2 CIRCLED ITALIC LATIN CAPITAL LETTER R ++1F12D ; mapped ; 0063 0064 # 5.2 CIRCLED CD ++1F12E ; mapped ; 0077 007A # 5.2 CIRCLED WZ ++1F12F ; disallowed # NA ++1F130 ; mapped ; 0061 # 6.0 SQUARED LATIN CAPITAL LETTER A ++1F131 ; mapped ; 0062 # 5.2 SQUARED LATIN CAPITAL LETTER B ++1F132 ; mapped ; 0063 # 6.0 SQUARED LATIN CAPITAL LETTER C ++1F133 ; mapped ; 0064 # 6.0 SQUARED LATIN CAPITAL LETTER D ++1F134 ; mapped ; 0065 # 6.0 SQUARED LATIN CAPITAL LETTER E ++1F135 ; mapped ; 0066 # 6.0 SQUARED LATIN CAPITAL LETTER F ++1F136 ; mapped ; 0067 # 6.0 SQUARED LATIN CAPITAL LETTER G ++1F137 ; mapped ; 0068 # 6.0 SQUARED LATIN CAPITAL LETTER H ++1F138 ; mapped ; 0069 # 6.0 SQUARED LATIN CAPITAL LETTER I ++1F139 ; mapped ; 006A # 6.0 SQUARED LATIN CAPITAL LETTER J ++1F13A ; mapped ; 006B # 6.0 SQUARED LATIN CAPITAL LETTER K ++1F13B ; mapped ; 006C # 6.0 SQUARED LATIN CAPITAL LETTER L ++1F13C ; mapped ; 006D # 6.0 SQUARED LATIN CAPITAL LETTER M ++1F13D ; mapped ; 006E # 5.2 SQUARED LATIN CAPITAL LETTER N ++1F13E ; mapped ; 006F # 6.0 SQUARED LATIN CAPITAL LETTER O ++1F13F ; mapped ; 0070 # 5.2 SQUARED LATIN CAPITAL LETTER P ++1F140 ; mapped ; 0071 # 6.0 SQUARED LATIN CAPITAL LETTER Q ++1F141 ; mapped ; 0072 # 6.0 SQUARED LATIN CAPITAL LETTER R ++1F142 ; mapped ; 0073 # 5.2 SQUARED LATIN CAPITAL LETTER S ++1F143 ; mapped ; 0074 # 6.0 SQUARED LATIN CAPITAL LETTER T ++1F144 ; mapped ; 0075 # 6.0 
SQUARED LATIN CAPITAL LETTER U ++1F145 ; mapped ; 0076 # 6.0 SQUARED LATIN CAPITAL LETTER V ++1F146 ; mapped ; 0077 # 5.2 SQUARED LATIN CAPITAL LETTER W ++1F147 ; mapped ; 0078 # 6.0 SQUARED LATIN CAPITAL LETTER X ++1F148 ; mapped ; 0079 # 6.0 SQUARED LATIN CAPITAL LETTER Y ++1F149 ; mapped ; 007A # 6.0 SQUARED LATIN CAPITAL LETTER Z ++1F14A ; mapped ; 0068 0076 # 5.2 SQUARED HV ++1F14B ; mapped ; 006D 0076 # 5.2 SQUARED MV ++1F14C ; mapped ; 0073 0064 # 5.2 SQUARED SD ++1F14D ; mapped ; 0073 0073 # 5.2 SQUARED SS ++1F14E ; mapped ; 0070 0070 0076 #5.2 SQUARED PPV ++1F14F ; mapped ; 0077 0063 # 6.0 SQUARED WC ++1F150..1F156 ; valid ; ; NV8 # 6.0 NEGATIVE CIRCLED LATIN CAPITAL LETTER A..NEGATIVE CIRCLED LATIN CAPITAL LETTER G ++1F157 ; valid ; ; NV8 # 5.2 NEGATIVE CIRCLED LATIN CAPITAL LETTER H ++1F158..1F15E ; valid ; ; NV8 # 6.0 NEGATIVE CIRCLED LATIN CAPITAL LETTER I..NEGATIVE CIRCLED LATIN CAPITAL LETTER O ++1F15F ; valid ; ; NV8 # 5.2 NEGATIVE CIRCLED LATIN CAPITAL LETTER P ++1F160..1F169 ; valid ; ; NV8 # 6.0 NEGATIVE CIRCLED LATIN CAPITAL LETTER Q..NEGATIVE CIRCLED LATIN CAPITAL LETTER Z ++1F16A ; mapped ; 006D 0063 # 6.1 RAISED MC SIGN ++1F16B ; mapped ; 006D 0064 # 6.1 RAISED MD SIGN ++1F16C..1F16F ; disallowed # NA .. 
++1F170..1F178 ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER A..NEGATIVE SQUARED LATIN CAPITAL LETTER I ++1F179 ; valid ; ; NV8 # 5.2 NEGATIVE SQUARED LATIN CAPITAL LETTER J ++1F17A ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER K ++1F17B..1F17C ; valid ; ; NV8 # 5.2 NEGATIVE SQUARED LATIN CAPITAL LETTER L..NEGATIVE SQUARED LATIN CAPITAL LETTER M ++1F17D..1F17E ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER N..NEGATIVE SQUARED LATIN CAPITAL LETTER O ++1F17F ; valid ; ; NV8 # 5.2 NEGATIVE SQUARED LATIN CAPITAL LETTER P ++1F180..1F189 ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED LATIN CAPITAL LETTER Q..NEGATIVE SQUARED LATIN CAPITAL LETTER Z ++1F18A..1F18D ; valid ; ; NV8 # 5.2 CROSSED NEGATIVE SQUARED LATIN CAPITAL LETTER P..NEGATIVE SQUARED SA ++1F18E..1F18F ; valid ; ; NV8 # 6.0 NEGATIVE SQUARED AB..NEGATIVE SQUARED WC ++1F190 ; mapped ; 0064 006A # 5.2 SQUARE DJ ++1F191..1F19A ; valid ; ; NV8 # 6.0 SQUARED CL..SQUARED VS ++1F19B..1F1AC ; valid ; ; NV8 # 9.0 SQUARED THREE D..SQUARED VOD ++1F1AD..1F1E5 ; disallowed # NA .. ++1F1E6..1F1FF ; valid ; ; NV8 # 6.0 REGIONAL INDICATOR SYMBOL LETTER A..REGIONAL INDICATOR SYMBOL LETTER Z ++1F200 ; mapped ; 307B 304B # 5.2 SQUARE HIRAGANA HOKA ++1F201 ; mapped ; 30B3 30B3 # 6.0 SQUARED KATAKANA KOKO ++1F202 ; mapped ; 30B5 # 6.0 SQUARED KATAKANA SA ++1F203..1F20F ; disallowed # NA .. 
++1F210 ; mapped ; 624B # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-624B ++1F211 ; mapped ; 5B57 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5B57 ++1F212 ; mapped ; 53CC # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-53CC ++1F213 ; mapped ; 30C7 # 5.2 SQUARED KATAKANA DE ++1F214 ; mapped ; 4E8C # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E8C ++1F215 ; mapped ; 591A # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-591A ++1F216 ; mapped ; 89E3 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-89E3 ++1F217 ; mapped ; 5929 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5929 ++1F218 ; mapped ; 4EA4 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4EA4 ++1F219 ; mapped ; 6620 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6620 ++1F21A ; mapped ; 7121 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-7121 ++1F21B ; mapped ; 6599 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6599 ++1F21C ; mapped ; 524D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-524D ++1F21D ; mapped ; 5F8C # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5F8C ++1F21E ; mapped ; 518D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-518D ++1F21F ; mapped ; 65B0 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-65B0 ++1F220 ; mapped ; 521D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-521D ++1F221 ; mapped ; 7D42 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-7D42 ++1F222 ; mapped ; 751F # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-751F ++1F223 ; mapped ; 8CA9 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-8CA9 ++1F224 ; mapped ; 58F0 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-58F0 ++1F225 ; mapped ; 5439 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5439 ++1F226 ; mapped ; 6F14 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6F14 ++1F227 ; mapped ; 6295 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6295 ++1F228 ; mapped ; 6355 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6355 ++1F229 ; mapped ; 4E00 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E00 ++1F22A ; mapped ; 4E09 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E09 ++1F22B ; mapped ; 904A # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-904A ++1F22C ; mapped ; 5DE6 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-5DE6 ++1F22D ; mapped ; 4E2D # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-4E2D ++1F22E ; mapped ; 53F3 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-53F3 
++1F22F ; mapped ; 6307 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6307 ++1F230 ; mapped ; 8D70 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-8D70 ++1F231 ; mapped ; 6253 # 5.2 SQUARED CJK UNIFIED IDEOGRAPH-6253 ++1F232 ; mapped ; 7981 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-7981 ++1F233 ; mapped ; 7A7A # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-7A7A ++1F234 ; mapped ; 5408 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-5408 ++1F235 ; mapped ; 6E80 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-6E80 ++1F236 ; mapped ; 6709 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-6709 ++1F237 ; mapped ; 6708 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-6708 ++1F238 ; mapped ; 7533 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-7533 ++1F239 ; mapped ; 5272 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-5272 ++1F23A ; mapped ; 55B6 # 6.0 SQUARED CJK UNIFIED IDEOGRAPH-55B6 ++1F23B ; mapped ; 914D # 9.0 SQUARED CJK UNIFIED IDEOGRAPH-914D ++1F23C..1F23F ; disallowed # NA .. ++1F240 ; mapped ; 3014 672C 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-672C ++1F241 ; mapped ; 3014 4E09 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-4E09 ++1F242 ; mapped ; 3014 4E8C 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-4E8C ++1F243 ; mapped ; 3014 5B89 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-5B89 ++1F244 ; mapped ; 3014 70B9 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-70B9 ++1F245 ; mapped ; 3014 6253 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-6253 ++1F246 ; mapped ; 3014 76D7 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-76D7 ++1F247 ; mapped ; 3014 52DD 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-52DD ++1F248 ; mapped ; 3014 6557 3015 #5.2 TORTOISE SHELL BRACKETED CJK UNIFIED IDEOGRAPH-6557 ++1F249..1F24F ; disallowed # NA .. ++1F250 ; mapped ; 5F97 # 6.0 CIRCLED IDEOGRAPH ADVANTAGE ++1F251 ; mapped ; 53EF # 6.0 CIRCLED IDEOGRAPH ACCEPT ++1F252..1F25F ; disallowed # NA .. 
++1F260..1F265 ; valid ; ; NV8 # 10.0 ROUNDED SYMBOL FOR FU..ROUNDED SYMBOL FOR CAI ++1F266..1F2FF ; disallowed # NA .. ++1F300..1F320 ; valid ; ; NV8 # 6.0 CYCLONE..SHOOTING STAR ++1F321..1F32C ; valid ; ; NV8 # 7.0 THERMOMETER..WIND BLOWING FACE ++1F32D..1F32F ; valid ; ; NV8 # 8.0 HOT DOG..BURRITO ++1F330..1F335 ; valid ; ; NV8 # 6.0 CHESTNUT..CACTUS ++1F336 ; valid ; ; NV8 # 7.0 HOT PEPPER ++1F337..1F37C ; valid ; ; NV8 # 6.0 TULIP..BABY BOTTLE ++1F37D ; valid ; ; NV8 # 7.0 FORK AND KNIFE WITH PLATE ++1F37E..1F37F ; valid ; ; NV8 # 8.0 BOTTLE WITH POPPING CORK..POPCORN ++1F380..1F393 ; valid ; ; NV8 # 6.0 RIBBON..GRADUATION CAP ++1F394..1F39F ; valid ; ; NV8 # 7.0 HEART WITH TIP ON THE LEFT..ADMISSION TICKETS ++1F3A0..1F3C4 ; valid ; ; NV8 # 6.0 CAROUSEL HORSE..SURFER ++1F3C5 ; valid ; ; NV8 # 7.0 SPORTS MEDAL ++1F3C6..1F3CA ; valid ; ; NV8 # 6.0 TROPHY..SWIMMER ++1F3CB..1F3CE ; valid ; ; NV8 # 7.0 WEIGHT LIFTER..RACING CAR ++1F3CF..1F3D3 ; valid ; ; NV8 # 8.0 CRICKET BAT AND BALL..TABLE TENNIS PADDLE AND BALL ++1F3D4..1F3DF ; valid ; ; NV8 # 7.0 SNOW CAPPED MOUNTAIN..STADIUM ++1F3E0..1F3F0 ; valid ; ; NV8 # 6.0 HOUSE BUILDING..EUROPEAN CASTLE ++1F3F1..1F3F7 ; valid ; ; NV8 # 7.0 WHITE PENNANT..LABEL ++1F3F8..1F3FF ; valid ; ; NV8 # 8.0 BADMINTON RACQUET AND SHUTTLECOCK..EMOJI MODIFIER FITZPATRICK TYPE-6 ++1F400..1F43E ; valid ; ; NV8 # 6.0 RAT..PAW PRINTS ++1F43F ; valid ; ; NV8 # 7.0 CHIPMUNK ++1F440 ; valid ; ; NV8 # 6.0 EYES ++1F441 ; valid ; ; NV8 # 7.0 EYE ++1F442..1F4F7 ; valid ; ; NV8 # 6.0 EAR..CAMERA ++1F4F8 ; valid ; ; NV8 # 7.0 CAMERA WITH FLASH ++1F4F9..1F4FC ; valid ; ; NV8 # 6.0 VIDEO CAMERA..VIDEOCASSETTE ++1F4FD..1F4FE ; valid ; ; NV8 # 7.0 FILM PROJECTOR..PORTABLE STEREO ++1F4FF ; valid ; ; NV8 # 8.0 PRAYER BEADS ++1F500..1F53D ; valid ; ; NV8 # 6.0 TWISTED RIGHTWARDS ARROWS..DOWN-POINTING SMALL RED TRIANGLE ++1F53E..1F53F ; valid ; ; NV8 # 7.0 LOWER RIGHT SHADOWED WHITE CIRCLE..UPPER RIGHT SHADOWED WHITE CIRCLE ++1F540..1F543 ; valid ; ; NV8 
# 6.1 CIRCLED CROSS POMMEE..NOTCHED LEFT SEMICIRCLE WITH THREE DOTS ++1F544..1F54A ; valid ; ; NV8 # 7.0 NOTCHED RIGHT SEMICIRCLE WITH THREE DOTS..DOVE OF PEACE ++1F54B..1F54F ; valid ; ; NV8 # 8.0 KAABA..BOWL OF HYGIEIA ++1F550..1F567 ; valid ; ; NV8 # 6.0 CLOCK FACE ONE OCLOCK..CLOCK FACE TWELVE-THIRTY ++1F568..1F579 ; valid ; ; NV8 # 7.0 RIGHT SPEAKER..JOYSTICK ++1F57A ; valid ; ; NV8 # 9.0 MAN DANCING ++1F57B..1F5A3 ; valid ; ; NV8 # 7.0 LEFT HAND TELEPHONE RECEIVER..BLACK DOWN POINTING BACKHAND INDEX ++1F5A4 ; valid ; ; NV8 # 9.0 BLACK HEART ++1F5A5..1F5FA ; valid ; ; NV8 # 7.0 DESKTOP COMPUTER..WORLD MAP ++1F5FB..1F5FF ; valid ; ; NV8 # 6.0 MOUNT FUJI..MOYAI ++1F600 ; valid ; ; NV8 # 6.1 GRINNING FACE ++1F601..1F610 ; valid ; ; NV8 # 6.0 GRINNING FACE WITH SMILING EYES..NEUTRAL FACE ++1F611 ; valid ; ; NV8 # 6.1 EXPRESSIONLESS FACE ++1F612..1F614 ; valid ; ; NV8 # 6.0 UNAMUSED FACE..PENSIVE FACE ++1F615 ; valid ; ; NV8 # 6.1 CONFUSED FACE ++1F616 ; valid ; ; NV8 # 6.0 CONFOUNDED FACE ++1F617 ; valid ; ; NV8 # 6.1 KISSING FACE ++1F618 ; valid ; ; NV8 # 6.0 FACE THROWING A KISS ++1F619 ; valid ; ; NV8 # 6.1 KISSING FACE WITH SMILING EYES ++1F61A ; valid ; ; NV8 # 6.0 KISSING FACE WITH CLOSED EYES ++1F61B ; valid ; ; NV8 # 6.1 FACE WITH STUCK-OUT TONGUE ++1F61C..1F61E ; valid ; ; NV8 # 6.0 FACE WITH STUCK-OUT TONGUE AND WINKING EYE..DISAPPOINTED FACE ++1F61F ; valid ; ; NV8 # 6.1 WORRIED FACE ++1F620..1F625 ; valid ; ; NV8 # 6.0 ANGRY FACE..DISAPPOINTED BUT RELIEVED FACE ++1F626..1F627 ; valid ; ; NV8 # 6.1 FROWNING FACE WITH OPEN MOUTH..ANGUISHED FACE ++1F628..1F62B ; valid ; ; NV8 # 6.0 FEARFUL FACE..TIRED FACE ++1F62C ; valid ; ; NV8 # 6.1 GRIMACING FACE ++1F62D ; valid ; ; NV8 # 6.0 LOUDLY CRYING FACE ++1F62E..1F62F ; valid ; ; NV8 # 6.1 FACE WITH OPEN MOUTH..HUSHED FACE ++1F630..1F633 ; valid ; ; NV8 # 6.0 FACE WITH OPEN MOUTH AND COLD SWEAT..FLUSHED FACE ++1F634 ; valid ; ; NV8 # 6.1 SLEEPING FACE ++1F635..1F640 ; valid ; ; NV8 # 6.0 DIZZY FACE..WEARY CAT 
FACE ++1F641..1F642 ; valid ; ; NV8 # 7.0 SLIGHTLY FROWNING FACE..SLIGHTLY SMILING FACE ++1F643..1F644 ; valid ; ; NV8 # 8.0 UPSIDE-DOWN FACE..FACE WITH ROLLING EYES ++1F645..1F64F ; valid ; ; NV8 # 6.0 FACE WITH NO GOOD GESTURE..PERSON WITH FOLDED HANDS ++1F650..1F67F ; valid ; ; NV8 # 7.0 NORTH WEST POINTING LEAF..REVERSE CHECKER BOARD ++1F680..1F6C5 ; valid ; ; NV8 # 6.0 ROCKET..LEFT LUGGAGE ++1F6C6..1F6CF ; valid ; ; NV8 # 7.0 TRIANGLE WITH ROUNDED CORNERS..BED ++1F6D0 ; valid ; ; NV8 # 8.0 PLACE OF WORSHIP ++1F6D1..1F6D2 ; valid ; ; NV8 # 9.0 OCTAGONAL SIGN..SHOPPING TROLLEY ++1F6D3..1F6D4 ; valid ; ; NV8 # 10.0 STUPA..PAGODA ++1F6D5..1F6DF ; disallowed # NA .. ++1F6E0..1F6EC ; valid ; ; NV8 # 7.0 HAMMER AND WRENCH..AIRPLANE ARRIVING ++1F6ED..1F6EF ; disallowed # NA .. ++1F6F0..1F6F3 ; valid ; ; NV8 # 7.0 SATELLITE..PASSENGER SHIP ++1F6F4..1F6F6 ; valid ; ; NV8 # 9.0 SCOOTER..CANOE ++1F6F7..1F6F8 ; valid ; ; NV8 # 10.0 SLED..FLYING SAUCER ++1F6F9..1F6FF ; disallowed # NA .. ++1F700..1F773 ; valid ; ; NV8 # 6.0 ALCHEMICAL SYMBOL FOR QUINTESSENCE..ALCHEMICAL SYMBOL FOR HALF OUNCE ++1F774..1F77F ; disallowed # NA .. ++1F780..1F7D4 ; valid ; ; NV8 # 7.0 BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..HEAVY TWELVE POINTED PINWHEEL STAR ++1F7D5..1F7FF ; disallowed # NA .. ++1F800..1F80B ; valid ; ; NV8 # 7.0 LEFTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD..DOWNWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD ++1F80C..1F80F ; disallowed # NA .. ++1F810..1F847 ; valid ; ; NV8 # 7.0 LEFTWARDS ARROW WITH SMALL EQUILATERAL ARROWHEAD..DOWNWARDS HEAVY ARROW ++1F848..1F84F ; disallowed # NA .. ++1F850..1F859 ; valid ; ; NV8 # 7.0 LEFTWARDS SANS-SERIF ARROW..UP DOWN SANS-SERIF ARROW ++1F85A..1F85F ; disallowed # NA .. ++1F860..1F887 ; valid ; ; NV8 # 7.0 WIDE-HEADED LEFTWARDS LIGHT BARB ARROW..WIDE-HEADED SOUTH WEST VERY HEAVY BARB ARROW ++1F888..1F88F ; disallowed # NA .. 
++1F890..1F8AD ; valid ; ; NV8 # 7.0 LEFTWARDS TRIANGLE ARROWHEAD..WHITE ARROW SHAFT WIDTH TWO THIRDS ++1F8AE..1F8FF ; disallowed # NA .. ++1F900..1F90B ; valid ; ; NV8 # 10.0 CIRCLED CROSS FORMEE WITH FOUR DOTS..DOWNWARD FACING NOTCHED HOOK WITH DOT ++1F90C..1F90F ; disallowed # NA .. ++1F910..1F918 ; valid ; ; NV8 # 8.0 ZIPPER-MOUTH FACE..SIGN OF THE HORNS ++1F919..1F91E ; valid ; ; NV8 # 9.0 CALL ME HAND..HAND WITH INDEX AND MIDDLE FINGERS CROSSED ++1F91F ; valid ; ; NV8 # 10.0 I LOVE YOU HAND SIGN ++1F920..1F927 ; valid ; ; NV8 # 9.0 FACE WITH COWBOY HAT..SNEEZING FACE ++1F928..1F92F ; valid ; ; NV8 # 10.0 FACE WITH ONE EYEBROW RAISED..SHOCKED FACE WITH EXPLODING HEAD ++1F930 ; valid ; ; NV8 # 9.0 PREGNANT WOMAN ++1F931..1F932 ; valid ; ; NV8 # 10.0 BREAST-FEEDING..PALMS UP TOGETHER ++1F933..1F93E ; valid ; ; NV8 # 9.0 SELFIE..HANDBALL ++1F93F ; disallowed # NA ++1F940..1F94B ; valid ; ; NV8 # 9.0 WILTED FLOWER..MARTIAL ARTS UNIFORM ++1F94C ; valid ; ; NV8 # 10.0 CURLING STONE ++1F94D..1F94F ; disallowed # NA .. ++1F950..1F95E ; valid ; ; NV8 # 9.0 CROISSANT..PANCAKES ++1F95F..1F96B ; valid ; ; NV8 # 10.0 DUMPLING..CANNED FOOD ++1F96C..1F97F ; disallowed # NA .. ++1F980..1F984 ; valid ; ; NV8 # 8.0 CRAB..UNICORN FACE ++1F985..1F991 ; valid ; ; NV8 # 9.0 EAGLE..SQUID ++1F992..1F997 ; valid ; ; NV8 # 10.0 GIRAFFE FACE..CRICKET ++1F998..1F9BF ; disallowed # NA .. ++1F9C0 ; valid ; ; NV8 # 8.0 CHEESE WEDGE ++1F9C1..1F9CF ; disallowed # NA .. ++1F9D0..1F9E6 ; valid ; ; NV8 # 10.0 FACE WITH MONOCLE..SOCKS ++1F9E7..1FFFD ; disallowed # NA .. ++1FFFE..1FFFF ; disallowed # 2.0 .. ++20000..2A6D6 ; valid # 3.1 CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6D6 ++2A6D7..2A6FF ; disallowed # NA .. ++2A700..2B734 ; valid # 5.2 CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B734 ++2B735..2B73F ; disallowed # NA .. ++2B740..2B81D ; valid # 6.0 CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D ++2B81E..2B81F ; disallowed # NA .. 
++2B820..2CEA1 ; valid # 8.0 CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1 ++2CEA2..2CEAF ; disallowed # NA .. ++2CEB0..2EBE0 ; valid # 10.0 CJK UNIFIED IDEOGRAPH-2CEB0..CJK UNIFIED IDEOGRAPH-2EBE0 ++2EBE1..2F7FF ; disallowed # NA .. ++2F800 ; mapped ; 4E3D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F800 ++2F801 ; mapped ; 4E38 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F801 ++2F802 ; mapped ; 4E41 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F802 ++2F803 ; mapped ; 20122 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F803 ++2F804 ; mapped ; 4F60 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F804 ++2F805 ; mapped ; 4FAE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F805 ++2F806 ; mapped ; 4FBB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F806 ++2F807 ; mapped ; 5002 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F807 ++2F808 ; mapped ; 507A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F808 ++2F809 ; mapped ; 5099 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F809 ++2F80A ; mapped ; 50E7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80A ++2F80B ; mapped ; 50CF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80B ++2F80C ; mapped ; 349E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80C ++2F80D ; mapped ; 2063A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80D ++2F80E ; mapped ; 514D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80E ++2F80F ; mapped ; 5154 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F80F ++2F810 ; mapped ; 5164 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F810 ++2F811 ; mapped ; 5177 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F811 ++2F812 ; mapped ; 2051C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F812 ++2F813 ; mapped ; 34B9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F813 ++2F814 ; mapped ; 5167 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F814 ++2F815 ; mapped ; 518D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F815 ++2F816 ; mapped ; 2054B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F816 ++2F817 ; mapped ; 5197 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F817 ++2F818 ; mapped ; 51A4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F818 ++2F819 ; mapped ; 4ECC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F819 ++2F81A ; mapped ; 51AC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81A ++2F81B ; mapped ; 
51B5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81B ++2F81C ; mapped ; 291DF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81C ++2F81D ; mapped ; 51F5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81D ++2F81E ; mapped ; 5203 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81E ++2F81F ; mapped ; 34DF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F81F ++2F820 ; mapped ; 523B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F820 ++2F821 ; mapped ; 5246 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F821 ++2F822 ; mapped ; 5272 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F822 ++2F823 ; mapped ; 5277 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F823 ++2F824 ; mapped ; 3515 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F824 ++2F825 ; mapped ; 52C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F825 ++2F826 ; mapped ; 52C9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F826 ++2F827 ; mapped ; 52E4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F827 ++2F828 ; mapped ; 52FA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F828 ++2F829 ; mapped ; 5305 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F829 ++2F82A ; mapped ; 5306 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82A ++2F82B ; mapped ; 5317 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82B ++2F82C ; mapped ; 5349 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82C ++2F82D ; mapped ; 5351 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82D ++2F82E ; mapped ; 535A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82E ++2F82F ; mapped ; 5373 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F82F ++2F830 ; mapped ; 537D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F830 ++2F831..2F833 ; mapped ; 537F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F831..CJK COMPATIBILITY IDEOGRAPH-2F833 ++2F834 ; mapped ; 20A2C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F834 ++2F835 ; mapped ; 7070 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F835 ++2F836 ; mapped ; 53CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F836 ++2F837 ; mapped ; 53DF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F837 ++2F838 ; mapped ; 20B63 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F838 ++2F839 ; mapped ; 53EB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F839 ++2F83A ; mapped ; 53F1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83A ++2F83B ; mapped ; 5406 # 3.1 CJK COMPATIBILITY 
IDEOGRAPH-2F83B ++2F83C ; mapped ; 549E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83C ++2F83D ; mapped ; 5438 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83D ++2F83E ; mapped ; 5448 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83E ++2F83F ; mapped ; 5468 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F83F ++2F840 ; mapped ; 54A2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F840 ++2F841 ; mapped ; 54F6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F841 ++2F842 ; mapped ; 5510 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F842 ++2F843 ; mapped ; 5553 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F843 ++2F844 ; mapped ; 5563 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F844 ++2F845..2F846 ; mapped ; 5584 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F845..CJK COMPATIBILITY IDEOGRAPH-2F846 ++2F847 ; mapped ; 5599 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F847 ++2F848 ; mapped ; 55AB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F848 ++2F849 ; mapped ; 55B3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F849 ++2F84A ; mapped ; 55C2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84A ++2F84B ; mapped ; 5716 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84B ++2F84C ; mapped ; 5606 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84C ++2F84D ; mapped ; 5717 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84D ++2F84E ; mapped ; 5651 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84E ++2F84F ; mapped ; 5674 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F84F ++2F850 ; mapped ; 5207 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F850 ++2F851 ; mapped ; 58EE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F851 ++2F852 ; mapped ; 57CE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F852 ++2F853 ; mapped ; 57F4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F853 ++2F854 ; mapped ; 580D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F854 ++2F855 ; mapped ; 578B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F855 ++2F856 ; mapped ; 5832 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F856 ++2F857 ; mapped ; 5831 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F857 ++2F858 ; mapped ; 58AC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F858 ++2F859 ; mapped ; 214E4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F859 ++2F85A ; mapped ; 58F2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85A ++2F85B ; mapped ; 
58F7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85B ++2F85C ; mapped ; 5906 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85C ++2F85D ; mapped ; 591A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85D ++2F85E ; mapped ; 5922 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85E ++2F85F ; mapped ; 5962 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F85F ++2F860 ; mapped ; 216A8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F860 ++2F861 ; mapped ; 216EA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F861 ++2F862 ; mapped ; 59EC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F862 ++2F863 ; mapped ; 5A1B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F863 ++2F864 ; mapped ; 5A27 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F864 ++2F865 ; mapped ; 59D8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F865 ++2F866 ; mapped ; 5A66 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F866 ++2F867 ; mapped ; 36EE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F867 ++2F868 ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F868 ++2F869 ; mapped ; 5B08 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F869 ++2F86A..2F86B ; mapped ; 5B3E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86A..CJK COMPATIBILITY IDEOGRAPH-2F86B ++2F86C ; mapped ; 219C8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86C ++2F86D ; mapped ; 5BC3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86D ++2F86E ; mapped ; 5BD8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86E ++2F86F ; mapped ; 5BE7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F86F ++2F870 ; mapped ; 5BF3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F870 ++2F871 ; mapped ; 21B18 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F871 ++2F872 ; mapped ; 5BFF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F872 ++2F873 ; mapped ; 5C06 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F873 ++2F874 ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F874 ++2F875 ; mapped ; 5C22 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F875 ++2F876 ; mapped ; 3781 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F876 ++2F877 ; mapped ; 5C60 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F877 ++2F878 ; mapped ; 5C6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F878 ++2F879 ; mapped ; 5CC0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F879 ++2F87A ; mapped ; 5C8D # 3.1 CJK COMPATIBILITY 
IDEOGRAPH-2F87A ++2F87B ; mapped ; 21DE4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87B ++2F87C ; mapped ; 5D43 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87C ++2F87D ; mapped ; 21DE6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87D ++2F87E ; mapped ; 5D6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87E ++2F87F ; mapped ; 5D6B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F87F ++2F880 ; mapped ; 5D7C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F880 ++2F881 ; mapped ; 5DE1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F881 ++2F882 ; mapped ; 5DE2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F882 ++2F883 ; mapped ; 382F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F883 ++2F884 ; mapped ; 5DFD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F884 ++2F885 ; mapped ; 5E28 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F885 ++2F886 ; mapped ; 5E3D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F886 ++2F887 ; mapped ; 5E69 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F887 ++2F888 ; mapped ; 3862 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F888 ++2F889 ; mapped ; 22183 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F889 ++2F88A ; mapped ; 387C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88A ++2F88B ; mapped ; 5EB0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88B ++2F88C ; mapped ; 5EB3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88C ++2F88D ; mapped ; 5EB6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88D ++2F88E ; mapped ; 5ECA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88E ++2F88F ; mapped ; 2A392 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F88F ++2F890 ; mapped ; 5EFE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F890 ++2F891..2F892 ; mapped ; 22331 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F891..CJK COMPATIBILITY IDEOGRAPH-2F892 ++2F893 ; mapped ; 8201 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F893 ++2F894..2F895 ; mapped ; 5F22 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F894..CJK COMPATIBILITY IDEOGRAPH-2F895 ++2F896 ; mapped ; 38C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F896 ++2F897 ; mapped ; 232B8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F897 ++2F898 ; mapped ; 261DA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F898 ++2F899 ; mapped ; 5F62 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F899 ++2F89A ; mapped ; 5F6B # 3.1 CJK 
COMPATIBILITY IDEOGRAPH-2F89A ++2F89B ; mapped ; 38E3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89B ++2F89C ; mapped ; 5F9A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89C ++2F89D ; mapped ; 5FCD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89D ++2F89E ; mapped ; 5FD7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89E ++2F89F ; mapped ; 5FF9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F89F ++2F8A0 ; mapped ; 6081 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A0 ++2F8A1 ; mapped ; 393A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A1 ++2F8A2 ; mapped ; 391C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A2 ++2F8A3 ; mapped ; 6094 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A3 ++2F8A4 ; mapped ; 226D4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A4 ++2F8A5 ; mapped ; 60C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A5 ++2F8A6 ; mapped ; 6148 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A6 ++2F8A7 ; mapped ; 614C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A7 ++2F8A8 ; mapped ; 614E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A8 ++2F8A9 ; mapped ; 614C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8A9 ++2F8AA ; mapped ; 617A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AA ++2F8AB ; mapped ; 618E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AB ++2F8AC ; mapped ; 61B2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AC ++2F8AD ; mapped ; 61A4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AD ++2F8AE ; mapped ; 61AF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AE ++2F8AF ; mapped ; 61DE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8AF ++2F8B0 ; mapped ; 61F2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B0 ++2F8B1 ; mapped ; 61F6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B1 ++2F8B2 ; mapped ; 6210 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B2 ++2F8B3 ; mapped ; 621B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B3 ++2F8B4 ; mapped ; 625D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B4 ++2F8B5 ; mapped ; 62B1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B5 ++2F8B6 ; mapped ; 62D4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B6 ++2F8B7 ; mapped ; 6350 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B7 ++2F8B8 ; mapped ; 22B0C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8B8 ++2F8B9 ; mapped ; 633D # 3.1 CJK COMPATIBILITY 
IDEOGRAPH-2F8B9 ++2F8BA ; mapped ; 62FC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BA ++2F8BB ; mapped ; 6368 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BB ++2F8BC ; mapped ; 6383 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BC ++2F8BD ; mapped ; 63E4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BD ++2F8BE ; mapped ; 22BF1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BE ++2F8BF ; mapped ; 6422 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8BF ++2F8C0 ; mapped ; 63C5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C0 ++2F8C1 ; mapped ; 63A9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C1 ++2F8C2 ; mapped ; 3A2E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C2 ++2F8C3 ; mapped ; 6469 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C3 ++2F8C4 ; mapped ; 647E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C4 ++2F8C5 ; mapped ; 649D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C5 ++2F8C6 ; mapped ; 6477 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C6 ++2F8C7 ; mapped ; 3A6C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C7 ++2F8C8 ; mapped ; 654F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C8 ++2F8C9 ; mapped ; 656C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8C9 ++2F8CA ; mapped ; 2300A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CA ++2F8CB ; mapped ; 65E3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CB ++2F8CC ; mapped ; 66F8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CC ++2F8CD ; mapped ; 6649 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CD ++2F8CE ; mapped ; 3B19 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CE ++2F8CF ; mapped ; 6691 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8CF ++2F8D0 ; mapped ; 3B08 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D0 ++2F8D1 ; mapped ; 3AE4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D1 ++2F8D2 ; mapped ; 5192 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D2 ++2F8D3 ; mapped ; 5195 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D3 ++2F8D4 ; mapped ; 6700 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D4 ++2F8D5 ; mapped ; 669C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D5 ++2F8D6 ; mapped ; 80AD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D6 ++2F8D7 ; mapped ; 43D9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D7 ++2F8D8 ; mapped ; 6717 # 3.1 CJK COMPATIBILITY 
IDEOGRAPH-2F8D8 ++2F8D9 ; mapped ; 671B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8D9 ++2F8DA ; mapped ; 6721 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DA ++2F8DB ; mapped ; 675E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DB ++2F8DC ; mapped ; 6753 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DC ++2F8DD ; mapped ; 233C3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DD ++2F8DE ; mapped ; 3B49 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DE ++2F8DF ; mapped ; 67FA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8DF ++2F8E0 ; mapped ; 6785 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E0 ++2F8E1 ; mapped ; 6852 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E1 ++2F8E2 ; mapped ; 6885 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E2 ++2F8E3 ; mapped ; 2346D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E3 ++2F8E4 ; mapped ; 688E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E4 ++2F8E5 ; mapped ; 681F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E5 ++2F8E6 ; mapped ; 6914 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E6 ++2F8E7 ; mapped ; 3B9D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E7 ++2F8E8 ; mapped ; 6942 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E8 ++2F8E9 ; mapped ; 69A3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8E9 ++2F8EA ; mapped ; 69EA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EA ++2F8EB ; mapped ; 6AA8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EB ++2F8EC ; mapped ; 236A3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EC ++2F8ED ; mapped ; 6ADB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8ED ++2F8EE ; mapped ; 3C18 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EE ++2F8EF ; mapped ; 6B21 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8EF ++2F8F0 ; mapped ; 238A7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F0 ++2F8F1 ; mapped ; 6B54 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F1 ++2F8F2 ; mapped ; 3C4E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F2 ++2F8F3 ; mapped ; 6B72 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F3 ++2F8F4 ; mapped ; 6B9F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F4 ++2F8F5 ; mapped ; 6BBA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F5 ++2F8F6 ; mapped ; 6BBB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F6 ++2F8F7 ; mapped ; 23A8D # 3.1 CJK COMPATIBILITY 
IDEOGRAPH-2F8F7 ++2F8F8 ; mapped ; 21D0B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F8 ++2F8F9 ; mapped ; 23AFA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8F9 ++2F8FA ; mapped ; 6C4E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FA ++2F8FB ; mapped ; 23CBC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FB ++2F8FC ; mapped ; 6CBF # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FC ++2F8FD ; mapped ; 6CCD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FD ++2F8FE ; mapped ; 6C67 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FE ++2F8FF ; mapped ; 6D16 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F8FF ++2F900 ; mapped ; 6D3E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F900 ++2F901 ; mapped ; 6D77 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F901 ++2F902 ; mapped ; 6D41 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F902 ++2F903 ; mapped ; 6D69 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F903 ++2F904 ; mapped ; 6D78 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F904 ++2F905 ; mapped ; 6D85 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F905 ++2F906 ; mapped ; 23D1E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F906 ++2F907 ; mapped ; 6D34 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F907 ++2F908 ; mapped ; 6E2F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F908 ++2F909 ; mapped ; 6E6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F909 ++2F90A ; mapped ; 3D33 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90A ++2F90B ; mapped ; 6ECB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90B ++2F90C ; mapped ; 6EC7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90C ++2F90D ; mapped ; 23ED1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90D ++2F90E ; mapped ; 6DF9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90E ++2F90F ; mapped ; 6F6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F90F ++2F910 ; mapped ; 23F5E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F910 ++2F911 ; mapped ; 23F8E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F911 ++2F912 ; mapped ; 6FC6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F912 ++2F913 ; mapped ; 7039 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F913 ++2F914 ; mapped ; 701E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F914 ++2F915 ; mapped ; 701B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F915 ++2F916 ; mapped ; 3D96 # 3.1 CJK COMPATIBILITY 
IDEOGRAPH-2F916 ++2F917 ; mapped ; 704A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F917 ++2F918 ; mapped ; 707D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F918 ++2F919 ; mapped ; 7077 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F919 ++2F91A ; mapped ; 70AD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91A ++2F91B ; mapped ; 20525 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91B ++2F91C ; mapped ; 7145 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91C ++2F91D ; mapped ; 24263 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91D ++2F91E ; mapped ; 719C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91E ++2F91F ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F91F ++2F920 ; mapped ; 7228 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F920 ++2F921 ; mapped ; 7235 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F921 ++2F922 ; mapped ; 7250 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F922 ++2F923 ; mapped ; 24608 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F923 ++2F924 ; mapped ; 7280 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F924 ++2F925 ; mapped ; 7295 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F925 ++2F926 ; mapped ; 24735 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F926 ++2F927 ; mapped ; 24814 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F927 ++2F928 ; mapped ; 737A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F928 ++2F929 ; mapped ; 738B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F929 ++2F92A ; mapped ; 3EAC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92A ++2F92B ; mapped ; 73A5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92B ++2F92C..2F92D ; mapped ; 3EB8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92C..CJK COMPATIBILITY IDEOGRAPH-2F92D ++2F92E ; mapped ; 7447 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92E ++2F92F ; mapped ; 745C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F92F ++2F930 ; mapped ; 7471 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F930 ++2F931 ; mapped ; 7485 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F931 ++2F932 ; mapped ; 74CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F932 ++2F933 ; mapped ; 3F1B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F933 ++2F934 ; mapped ; 7524 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F934 ++2F935 ; mapped ; 24C36 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F935 ++2F936 ; mapped ; 
753E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F936 ++2F937 ; mapped ; 24C92 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F937 ++2F938 ; mapped ; 7570 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F938 ++2F939 ; mapped ; 2219F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F939 ++2F93A ; mapped ; 7610 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93A ++2F93B ; mapped ; 24FA1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93B ++2F93C ; mapped ; 24FB8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93C ++2F93D ; mapped ; 25044 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93D ++2F93E ; mapped ; 3FFC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93E ++2F93F ; mapped ; 4008 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F93F ++2F940 ; mapped ; 76F4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F940 ++2F941 ; mapped ; 250F3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F941 ++2F942 ; mapped ; 250F2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F942 ++2F943 ; mapped ; 25119 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F943 ++2F944 ; mapped ; 25133 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F944 ++2F945 ; mapped ; 771E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F945 ++2F946..2F947 ; mapped ; 771F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F946..CJK COMPATIBILITY IDEOGRAPH-2F947 ++2F948 ; mapped ; 774A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F948 ++2F949 ; mapped ; 4039 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F949 ++2F94A ; mapped ; 778B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94A ++2F94B ; mapped ; 4046 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94B ++2F94C ; mapped ; 4096 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94C ++2F94D ; mapped ; 2541D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94D ++2F94E ; mapped ; 784E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94E ++2F94F ; mapped ; 788C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F94F ++2F950 ; mapped ; 78CC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F950 ++2F951 ; mapped ; 40E3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F951 ++2F952 ; mapped ; 25626 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F952 ++2F953 ; mapped ; 7956 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F953 ++2F954 ; mapped ; 2569A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F954 ++2F955 ; mapped ; 256C5 # 3.1 CJK 
COMPATIBILITY IDEOGRAPH-2F955 ++2F956 ; mapped ; 798F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F956 ++2F957 ; mapped ; 79EB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F957 ++2F958 ; mapped ; 412F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F958 ++2F959 ; mapped ; 7A40 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F959 ++2F95A ; mapped ; 7A4A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95A ++2F95B ; mapped ; 7A4F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95B ++2F95C ; mapped ; 2597C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95C ++2F95D..2F95E ; mapped ; 25AA7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95D..CJK COMPATIBILITY IDEOGRAPH-2F95E ++2F95F ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F95F ++2F960 ; mapped ; 4202 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F960 ++2F961 ; mapped ; 25BAB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F961 ++2F962 ; mapped ; 7BC6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F962 ++2F963 ; mapped ; 7BC9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F963 ++2F964 ; mapped ; 4227 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F964 ++2F965 ; mapped ; 25C80 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F965 ++2F966 ; mapped ; 7CD2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F966 ++2F967 ; mapped ; 42A0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F967 ++2F968 ; mapped ; 7CE8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F968 ++2F969 ; mapped ; 7CE3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F969 ++2F96A ; mapped ; 7D00 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96A ++2F96B ; mapped ; 25F86 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96B ++2F96C ; mapped ; 7D63 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96C ++2F96D ; mapped ; 4301 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96D ++2F96E ; mapped ; 7DC7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96E ++2F96F ; mapped ; 7E02 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F96F ++2F970 ; mapped ; 7E45 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F970 ++2F971 ; mapped ; 4334 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F971 ++2F972 ; mapped ; 26228 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F972 ++2F973 ; mapped ; 26247 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F973 ++2F974 ; mapped ; 4359 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F974 
++2F975 ; mapped ; 262D9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F975 ++2F976 ; mapped ; 7F7A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F976 ++2F977 ; mapped ; 2633E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F977 ++2F978 ; mapped ; 7F95 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F978 ++2F979 ; mapped ; 7FFA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F979 ++2F97A ; mapped ; 8005 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97A ++2F97B ; mapped ; 264DA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97B ++2F97C ; mapped ; 26523 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97C ++2F97D ; mapped ; 8060 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97D ++2F97E ; mapped ; 265A8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97E ++2F97F ; mapped ; 8070 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F97F ++2F980 ; mapped ; 2335F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F980 ++2F981 ; mapped ; 43D5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F981 ++2F982 ; mapped ; 80B2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F982 ++2F983 ; mapped ; 8103 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F983 ++2F984 ; mapped ; 440B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F984 ++2F985 ; mapped ; 813E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F985 ++2F986 ; mapped ; 5AB5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F986 ++2F987 ; mapped ; 267A7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F987 ++2F988 ; mapped ; 267B5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F988 ++2F989 ; mapped ; 23393 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F989 ++2F98A ; mapped ; 2339C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98A ++2F98B ; mapped ; 8201 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98B ++2F98C ; mapped ; 8204 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98C ++2F98D ; mapped ; 8F9E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98D ++2F98E ; mapped ; 446B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98E ++2F98F ; mapped ; 8291 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F98F ++2F990 ; mapped ; 828B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F990 ++2F991 ; mapped ; 829D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F991 ++2F992 ; mapped ; 52B3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F992 ++2F993 ; mapped ; 82B1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F993 
++2F994 ; mapped ; 82B3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F994 ++2F995 ; mapped ; 82BD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F995 ++2F996 ; mapped ; 82E6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F996 ++2F997 ; mapped ; 26B3C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F997 ++2F998 ; mapped ; 82E5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F998 ++2F999 ; mapped ; 831D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F999 ++2F99A ; mapped ; 8363 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99A ++2F99B ; mapped ; 83AD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99B ++2F99C ; mapped ; 8323 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99C ++2F99D ; mapped ; 83BD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99D ++2F99E ; mapped ; 83E7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99E ++2F99F ; mapped ; 8457 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F99F ++2F9A0 ; mapped ; 8353 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A0 ++2F9A1 ; mapped ; 83CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A1 ++2F9A2 ; mapped ; 83CC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A2 ++2F9A3 ; mapped ; 83DC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A3 ++2F9A4 ; mapped ; 26C36 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A4 ++2F9A5 ; mapped ; 26D6B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A5 ++2F9A6 ; mapped ; 26CD5 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A6 ++2F9A7 ; mapped ; 452B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A7 ++2F9A8 ; mapped ; 84F1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A8 ++2F9A9 ; mapped ; 84F3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9A9 ++2F9AA ; mapped ; 8516 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AA ++2F9AB ; mapped ; 273CA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AB ++2F9AC ; mapped ; 8564 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AC ++2F9AD ; mapped ; 26F2C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AD ++2F9AE ; mapped ; 455D # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AE ++2F9AF ; mapped ; 4561 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9AF ++2F9B0 ; mapped ; 26FB1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B0 ++2F9B1 ; mapped ; 270D2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B1 ++2F9B2 ; mapped ; 456B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B2 ++2F9B3 
; mapped ; 8650 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B3 ++2F9B4 ; mapped ; 865C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B4 ++2F9B5 ; mapped ; 8667 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B5 ++2F9B6 ; mapped ; 8669 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B6 ++2F9B7 ; mapped ; 86A9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B7 ++2F9B8 ; mapped ; 8688 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B8 ++2F9B9 ; mapped ; 870E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9B9 ++2F9BA ; mapped ; 86E2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BA ++2F9BB ; mapped ; 8779 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BB ++2F9BC ; mapped ; 8728 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BC ++2F9BD ; mapped ; 876B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BD ++2F9BE ; mapped ; 8786 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BE ++2F9BF ; disallowed # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9BF ++2F9C0 ; mapped ; 87E1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C0 ++2F9C1 ; mapped ; 8801 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C1 ++2F9C2 ; mapped ; 45F9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C2 ++2F9C3 ; mapped ; 8860 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C3 ++2F9C4 ; mapped ; 8863 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C4 ++2F9C5 ; mapped ; 27667 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C5 ++2F9C6 ; mapped ; 88D7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C6 ++2F9C7 ; mapped ; 88DE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C7 ++2F9C8 ; mapped ; 4635 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C8 ++2F9C9 ; mapped ; 88FA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9C9 ++2F9CA ; mapped ; 34BB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CA ++2F9CB ; mapped ; 278AE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CB ++2F9CC ; mapped ; 27966 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CC ++2F9CD ; mapped ; 46BE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CD ++2F9CE ; mapped ; 46C7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CE ++2F9CF ; mapped ; 8AA0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9CF ++2F9D0 ; mapped ; 8AED # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D0 ++2F9D1 ; mapped ; 8B8A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D1 ++2F9D2 ; mapped ; 8C55 
# 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D2 ++2F9D3 ; mapped ; 27CA8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D3 ++2F9D4 ; mapped ; 8CAB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D4 ++2F9D5 ; mapped ; 8CC1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D5 ++2F9D6 ; mapped ; 8D1B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D6 ++2F9D7 ; mapped ; 8D77 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D7 ++2F9D8 ; mapped ; 27F2F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D8 ++2F9D9 ; mapped ; 20804 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9D9 ++2F9DA ; mapped ; 8DCB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DA ++2F9DB ; mapped ; 8DBC # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DB ++2F9DC ; mapped ; 8DF0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DC ++2F9DD ; mapped ; 208DE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DD ++2F9DE ; mapped ; 8ED4 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DE ++2F9DF ; mapped ; 8F38 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9DF ++2F9E0 ; mapped ; 285D2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E0 ++2F9E1 ; mapped ; 285ED # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E1 ++2F9E2 ; mapped ; 9094 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E2 ++2F9E3 ; mapped ; 90F1 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E3 ++2F9E4 ; mapped ; 9111 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E4 ++2F9E5 ; mapped ; 2872E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E5 ++2F9E6 ; mapped ; 911B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E6 ++2F9E7 ; mapped ; 9238 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E7 ++2F9E8 ; mapped ; 92D7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E8 ++2F9E9 ; mapped ; 92D8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9E9 ++2F9EA ; mapped ; 927C # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EA ++2F9EB ; mapped ; 93F9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EB ++2F9EC ; mapped ; 9415 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EC ++2F9ED ; mapped ; 28BFA # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9ED ++2F9EE ; mapped ; 958B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EE ++2F9EF ; mapped ; 4995 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9EF ++2F9F0 ; mapped ; 95B7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F0 ++2F9F1 ; mapped ; 28D77 # 3.1 
CJK COMPATIBILITY IDEOGRAPH-2F9F1 ++2F9F2 ; mapped ; 49E6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F2 ++2F9F3 ; mapped ; 96C3 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F3 ++2F9F4 ; mapped ; 5DB2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F4 ++2F9F5 ; mapped ; 9723 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F5 ++2F9F6 ; mapped ; 29145 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F6 ++2F9F7 ; mapped ; 2921A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F7 ++2F9F8 ; mapped ; 4A6E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F8 ++2F9F9 ; mapped ; 4A76 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9F9 ++2F9FA ; mapped ; 97E0 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FA ++2F9FB ; mapped ; 2940A # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FB ++2F9FC ; mapped ; 4AB2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FC ++2F9FD ; mapped ; 29496 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FD ++2F9FE..2F9FF ; mapped ; 980B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2F9FE..CJK COMPATIBILITY IDEOGRAPH-2F9FF ++2FA00 ; mapped ; 9829 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA00 ++2FA01 ; mapped ; 295B6 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA01 ++2FA02 ; mapped ; 98E2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA02 ++2FA03 ; mapped ; 4B33 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA03 ++2FA04 ; mapped ; 9929 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA04 ++2FA05 ; mapped ; 99A7 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA05 ++2FA06 ; mapped ; 99C2 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA06 ++2FA07 ; mapped ; 99FE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA07 ++2FA08 ; mapped ; 4BCE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA08 ++2FA09 ; mapped ; 29B30 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA09 ++2FA0A ; mapped ; 9B12 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0A ++2FA0B ; mapped ; 9C40 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0B ++2FA0C ; mapped ; 9CFD # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0C ++2FA0D ; mapped ; 4CCE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0D ++2FA0E ; mapped ; 4CED # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0E ++2FA0F ; mapped ; 9D67 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA0F ++2FA10 ; mapped ; 2A0CE # 3.1 CJK COMPATIBILITY 
IDEOGRAPH-2FA10 ++2FA11 ; mapped ; 4CF8 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA11 ++2FA12 ; mapped ; 2A105 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA12 ++2FA13 ; mapped ; 2A20E # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA13 ++2FA14 ; mapped ; 2A291 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA14 ++2FA15 ; mapped ; 9EBB # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA15 ++2FA16 ; mapped ; 4D56 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA16 ++2FA17 ; mapped ; 9EF9 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA17 ++2FA18 ; mapped ; 9EFE # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA18 ++2FA19 ; mapped ; 9F05 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA19 ++2FA1A ; mapped ; 9F0F # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA1A ++2FA1B ; mapped ; 9F16 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA1B ++2FA1C ; mapped ; 9F3B # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA1C ++2FA1D ; mapped ; 2A600 # 3.1 CJK COMPATIBILITY IDEOGRAPH-2FA1D ++2FA1E..2FFFD ; disallowed # NA .. ++2FFFE..2FFFF ; disallowed # 2.0 .. ++30000..3FFFD ; disallowed # NA .. ++3FFFE..3FFFF ; disallowed # 2.0 .. ++40000..4FFFD ; disallowed # NA .. ++4FFFE..4FFFF ; disallowed # 2.0 .. ++50000..5FFFD ; disallowed # NA .. ++5FFFE..5FFFF ; disallowed # 2.0 .. ++60000..6FFFD ; disallowed # NA .. ++6FFFE..6FFFF ; disallowed # 2.0 .. ++70000..7FFFD ; disallowed # NA .. ++7FFFE..7FFFF ; disallowed # 2.0 .. ++80000..8FFFD ; disallowed # NA .. ++8FFFE..8FFFF ; disallowed # 2.0 .. ++90000..9FFFD ; disallowed # NA .. ++9FFFE..9FFFF ; disallowed # 2.0 .. ++A0000..AFFFD ; disallowed # NA .. ++AFFFE..AFFFF ; disallowed # 2.0 .. ++B0000..BFFFD ; disallowed # NA .. ++BFFFE..BFFFF ; disallowed # 2.0 .. ++C0000..CFFFD ; disallowed # NA .. ++CFFFE..CFFFF ; disallowed # 2.0 .. ++D0000..DFFFD ; disallowed # NA .. ++DFFFE..DFFFF ; disallowed # 2.0 .. ++E0000 ; disallowed # NA ++E0001 ; disallowed # 3.1 LANGUAGE TAG ++E0002..E001F ; disallowed # NA .. ++E0020..E007F ; disallowed # 3.1 TAG SPACE..CANCEL TAG ++E0080..E00FF ; disallowed # NA .. 
++E0100..E01EF ; ignored # 4.0 VARIATION SELECTOR-17..VARIATION SELECTOR-256 ++E01F0..EFFFD ; disallowed # NA .. ++EFFFE..EFFFF ; disallowed # 2.0 .. ++F0000..FFFFD ; disallowed # 2.0 .. ++FFFFE..FFFFF ; disallowed # 2.0 .. ++100000..10FFFD; disallowed # 2.0 .. ++10FFFE..10FFFF; disallowed # 2.0 .. ++ ++# Total code points: 1114112 ++ diff --cc vendor/idna-0.1.4/src/lib.rs index 000000000,000000000..bdeafe448 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/src/lib.rs @@@ -1,0 -1,0 +1,73 @@@ ++// Copyright 2016 The rust-url developers. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! This Rust crate implements IDNA ++//! [per the WHATWG URL Standard](https://url.spec.whatwg.org/#idna). ++//! ++//! It also exposes the underlying algorithms from [*Unicode IDNA Compatibility Processing* ++//! (Unicode Technical Standard #46)](http://www.unicode.org/reports/tr46/) ++//! and [Punycode (RFC 3492)](https://tools.ietf.org/html/rfc3492). ++//! ++//! Quoting from [UTS #46’s introduction](http://www.unicode.org/reports/tr46/#Introduction): ++//! ++//! > Initially, domain names were restricted to ASCII characters. ++//! > A system was introduced in 2003 for internationalized domain names (IDN). ++//! > This system is called Internationalizing Domain Names for Applications, ++//! > or IDNA2003 for short. ++//! > This mechanism supports IDNs by means of a client software transformation ++//! > into a format known as Punycode. ++//! > A revision of IDNA was approved in 2010 (IDNA2008). ++//! > This revision has a number of incompatibilities with IDNA2003. ++//! > ++//! > The incompatibilities force implementers of client software, ++//! > such as browsers and emailers, ++//! > to face difficult choices during the transition period ++//! > as registries shift from IDNA2003 to IDNA2008. ++//! 
> This document specifies a mechanism ++//! > that minimizes the impact of this transition for client software, ++//! > allowing client software to access domains that are valid under either system. ++ ++#[macro_use] extern crate matches; ++extern crate unicode_bidi; ++extern crate unicode_normalization; ++ ++pub mod punycode; ++pub mod uts46; ++ ++/// The [domain to ASCII](https://url.spec.whatwg.org/#concept-domain-to-ascii) algorithm. ++/// ++/// Return the ASCII representation a domain name, ++/// normalizing characters (upper-case to lower-case and other kinds of equivalence) ++/// and using Punycode as necessary. ++/// ++/// This process may fail. ++pub fn domain_to_ascii(domain: &str) -> Result { ++ uts46::to_ascii(domain, uts46::Flags { ++ use_std3_ascii_rules: false, ++ transitional_processing: true, // XXX: switch when Firefox does ++ verify_dns_length: false, ++ }) ++} ++ ++/// The [domain to Unicode](https://url.spec.whatwg.org/#concept-domain-to-unicode) algorithm. ++/// ++/// Return the Unicode representation of a domain name, ++/// normalizing characters (upper-case to lower-case and other kinds of equivalence) ++/// and decoding Punycode as necessary. ++/// ++/// This may indicate [syntax violations](https://url.spec.whatwg.org/#syntax-violation) ++/// but always returns a string for the mapped domain. ++pub fn domain_to_unicode(domain: &str) -> (String, Result<(), uts46::Errors>) { ++ uts46::to_unicode(domain, uts46::Flags { ++ use_std3_ascii_rules: false, ++ ++ // Unused: ++ transitional_processing: true, ++ verify_dns_length: false, ++ }) ++} diff --cc vendor/idna-0.1.4/src/make_uts46_mapping_table.py index 000000000,000000000..1a9774433 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/src/make_uts46_mapping_table.py @@@ -1,0 -1,0 +1,139 @@@ ++# Copyright 2013-2014 The rust-url developers. ++# ++# Licensed under the Apache License, Version 2.0 or the MIT license ++# , at your ++# option. 
This file may not be copied, modified, or distributed ++# except according to those terms. ++ ++# Run as: python make_uts46_mapping_table.py IdnaMappingTable.txt > uts46_mapping_table.rs ++# You can get the latest idna table from ++# http://www.unicode.org/Public/idna/latest/IdnaMappingTable.txt ++ ++import collections ++import itertools ++ ++print('''\ ++// Copyright 2013-2014 The rust-url developers. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++// Generated by make_idna_table.py ++ ++static TABLE: &'static [Range] = &[ ++''') ++ ++txt = open("IdnaMappingTable.txt") ++ ++def escape_char(c): ++ return "\\u{%x}" % ord(c[0]) ++ ++def char(s): ++ return unichr(int(s, 16)) ++ ++strtab = collections.OrderedDict() ++strtab_offset = 0 ++ ++def strtab_slice(s): ++ global strtab, strtab_offset ++ ++ if s in strtab: ++ return strtab[s] ++ else: ++ utf8_len = len(s.encode('utf8')) ++ c = (strtab_offset, utf8_len) ++ strtab[s] = c ++ strtab_offset += utf8_len ++ return c ++ ++def rust_slice(s): ++ start = s[0] ++ length = s[1] ++ start_lo = start & 0xff ++ start_hi = start >> 8 ++ assert length <= 255 ++ assert start_hi <= 255 ++ return "(StringTableSlice { byte_start_lo: %d, byte_start_hi: %d, byte_len: %d })" % (start_lo, start_hi, length) ++ ++ranges = [] ++ ++for line in txt: ++ # remove comments ++ line, _, _ = line.partition('#') ++ # skip empty lines ++ if len(line.strip()) == 0: ++ continue ++ fields = line.split(';') ++ if fields[0].strip() == 'D800..DFFF': ++ continue # Surrogates don't occur in Rust strings. 
++ first, _, last = fields[0].strip().partition('..') ++ if not last: ++ last = first ++ mapping = fields[1].strip().replace('_', ' ').title().replace(' ', '') ++ unicode_str = None ++ if len(fields) > 2: ++ if fields[2].strip(): ++ unicode_str = u''.join(char(c) for c in fields[2].strip().split(' ')) ++ elif mapping == "Deviation": ++ unicode_str = u'' ++ ranges.append((first, last, mapping, unicode_str)) ++ ++def mergeable_key(r): ++ mapping = r[2] ++ # These types have associated data, so we should not merge them. ++ if mapping in ('Mapped', 'Deviation', 'DisallowedStd3Mapped'): ++ return r ++ assert mapping in ('Valid', 'Ignored', 'Disallowed', 'DisallowedStd3Valid') ++ return mapping ++ ++grouped_ranges = itertools.groupby(ranges, key=mergeable_key) ++ ++optimized_ranges = [] ++ ++for (k, g) in grouped_ranges: ++ group = list(g) ++ if len(group) == 1: ++ optimized_ranges.append(group[0]) ++ continue ++ # Assert that nothing in the group has an associated unicode string. ++ for g in group: ++ if g[3] is not None and len(g[3]) > 2: ++ assert not g[3][2].strip() ++ # Assert that consecutive members of the group don't leave gaps in ++ # the codepoint space. ++ a, b = itertools.tee(group) ++ next(b, None) ++ for (g1, g2) in itertools.izip(a, b): ++ last_char = int(g1[1], 16) ++ next_char = int(g2[0], 16) ++ if last_char + 1 == next_char: ++ continue ++ # There's a gap where surrogates would appear, but we don't have to ++ # worry about that gap, as surrogates never appear in Rust strings. ++ # Assert we're seeing the surrogate case here. 
++ assert last_char == 0xd7ff ++ assert next_char == 0xe000 ++ first = group[0][0] ++ last = group[-1][1] ++ mapping = group[0][2] ++ unicode_str = group[0][3] ++ optimized_ranges.append((first, last, mapping, unicode_str)) ++ ++for (first, last, mapping, unicode_str) in optimized_ranges: ++ if unicode_str is not None: ++ mapping += rust_slice(strtab_slice(unicode_str)) ++ print(" Range { from: '%s', to: '%s', mapping: %s }," % (escape_char(char(first)), ++ escape_char(char(last)), ++ mapping)) ++ ++print("];\n") ++ ++def escape_str(s): ++ return [escape_char(c) for c in s] ++ ++print("static STRING_TABLE: &'static str = \"%s\";" ++ % '\\\n '.join(itertools.chain(*[escape_str(s) for s in strtab.iterkeys()]))) diff --cc vendor/idna-0.1.4/src/punycode.rs index 000000000,000000000..75bb1d6e8 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/src/punycode.rs @@@ -1,0 -1,0 +1,212 @@@ ++// Copyright 2013 The rust-url developers. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! Punycode ([RFC 3492](http://tools.ietf.org/html/rfc3492)) implementation. ++//! ++//! Since Punycode fundamentally works on unicode code points, ++//! `encode` and `decode` take and return slices and vectors of `char`. ++//! `encode_str` and `decode_to_string` provide convenience wrappers ++//! that convert from and to Rust’s UTF-8 based `str` and `String` types. 
++ ++use std::u32; ++use std::char; ++use std::ascii::AsciiExt; ++ ++// Bootstring parameters for Punycode ++static BASE: u32 = 36; ++static T_MIN: u32 = 1; ++static T_MAX: u32 = 26; ++static SKEW: u32 = 38; ++static DAMP: u32 = 700; ++static INITIAL_BIAS: u32 = 72; ++static INITIAL_N: u32 = 0x80; ++static DELIMITER: char = '-'; ++ ++ ++#[inline] ++fn adapt(mut delta: u32, num_points: u32, first_time: bool) -> u32 { ++ delta /= if first_time { DAMP } else { 2 }; ++ delta += delta / num_points; ++ let mut k = 0; ++ while delta > ((BASE - T_MIN) * T_MAX) / 2 { ++ delta /= BASE - T_MIN; ++ k += BASE; ++ } ++ k + (((BASE - T_MIN + 1) * delta) / (delta + SKEW)) ++} ++ ++ ++/// Convert Punycode to an Unicode `String`. ++/// ++/// This is a convenience wrapper around `decode`. ++#[inline] ++pub fn decode_to_string(input: &str) -> Option { ++ decode(input).map(|chars| chars.into_iter().collect()) ++} ++ ++ ++/// Convert Punycode to Unicode. ++/// ++/// Return None on malformed input or overflow. ++/// Overflow can only happen on inputs that take more than ++/// 63 encoded bytes, the DNS limit on domain name labels. ++pub fn decode(input: &str) -> Option> { ++ // Handle "basic" (ASCII) code points. ++ // They are encoded as-is before the last delimiter, if any. ++ let (mut output, input) = match input.rfind(DELIMITER) { ++ None => (Vec::new(), input), ++ Some(position) => ( ++ input[..position].chars().collect(), ++ if position > 0 { &input[position + 1..] } else { input } ++ ) ++ }; ++ let mut code_point = INITIAL_N; ++ let mut bias = INITIAL_BIAS; ++ let mut i = 0; ++ let mut iter = input.bytes(); ++ loop { ++ let previous_i = i; ++ let mut weight = 1; ++ let mut k = BASE; ++ let mut byte = match iter.next() { ++ None => break, ++ Some(byte) => byte, ++ }; ++ // Decode a generalized variable-length integer into delta, ++ // which gets added to i. ++ loop { ++ let digit = match byte { ++ byte @ b'0' ... b'9' => byte - b'0' + 26, ++ byte @ b'A' ... 
b'Z' => byte - b'A', ++ byte @ b'a' ... b'z' => byte - b'a', ++ _ => return None ++ } as u32; ++ if digit > (u32::MAX - i) / weight { ++ return None // Overflow ++ } ++ i += digit * weight; ++ let t = if k <= bias { T_MIN } ++ else if k >= bias + T_MAX { T_MAX } ++ else { k - bias }; ++ if digit < t { ++ break ++ } ++ if weight > u32::MAX / (BASE - t) { ++ return None // Overflow ++ } ++ weight *= BASE - t; ++ k += BASE; ++ byte = match iter.next() { ++ None => return None, // End of input before the end of this delta ++ Some(byte) => byte, ++ }; ++ } ++ let length = output.len() as u32; ++ bias = adapt(i - previous_i, length + 1, previous_i == 0); ++ if i / (length + 1) > u32::MAX - code_point { ++ return None // Overflow ++ } ++ // i was supposed to wrap around from length+1 to 0, ++ // incrementing code_point each time. ++ code_point += i / (length + 1); ++ i %= length + 1; ++ let c = match char::from_u32(code_point) { ++ Some(c) => c, ++ None => return None ++ }; ++ output.insert(i as usize, c); ++ i += 1; ++ } ++ Some(output) ++} ++ ++ ++/// Convert an Unicode `str` to Punycode. ++/// ++/// This is a convenience wrapper around `encode`. ++#[inline] ++pub fn encode_str(input: &str) -> Option { ++ encode(&input.chars().collect::>()) ++} ++ ++ ++/// Convert Unicode to Punycode. ++/// ++/// Return None on overflow, which can only happen on inputs that would take more than ++/// 63 encoded bytes, the DNS limit on domain name labels. ++pub fn encode(input: &[char]) -> Option { ++ // Handle "basic" (ASCII) code points. They are encoded as-is. 
++ let output_bytes = input.iter().filter_map(|&c| ++ if c.is_ascii() { Some(c as u8) } else { None } ++ ).collect(); ++ let mut output = unsafe { String::from_utf8_unchecked(output_bytes) }; ++ let basic_length = output.len() as u32; ++ if basic_length > 0 { ++ output.push_str("-") ++ } ++ let mut code_point = INITIAL_N; ++ let mut delta = 0; ++ let mut bias = INITIAL_BIAS; ++ let mut processed = basic_length; ++ let input_length = input.len() as u32; ++ while processed < input_length { ++ // All code points < code_point have been handled already. ++ // Find the next larger one. ++ let min_code_point = input.iter().map(|&c| c as u32) ++ .filter(|&c| c >= code_point).min().unwrap(); ++ if min_code_point - code_point > (u32::MAX - delta) / (processed + 1) { ++ return None // Overflow ++ } ++ // Increase delta to advance the decoder’s state to ++ delta += (min_code_point - code_point) * (processed + 1); ++ code_point = min_code_point; ++ for &c in input { ++ let c = c as u32; ++ if c < code_point { ++ delta += 1; ++ if delta == 0 { ++ return None // Overflow ++ } ++ } ++ if c == code_point { ++ // Represent delta as a generalized variable-length integer: ++ let mut q = delta; ++ let mut k = BASE; ++ loop { ++ let t = if k <= bias { T_MIN } ++ else if k >= bias + T_MAX { T_MAX } ++ else { k - bias }; ++ if q < t { ++ break ++ } ++ let value = t + ((q - t) % (BASE - t)); ++ output.push(value_to_digit(value)); ++ q = (q - t) / (BASE - t); ++ k += BASE; ++ } ++ output.push(value_to_digit(q)); ++ bias = adapt(delta, processed + 1, processed == basic_length); ++ delta = 0; ++ processed += 1; ++ } ++ } ++ delta += 1; ++ code_point += 1; ++ } ++ Some(output) ++} ++ ++ ++#[inline] ++fn value_to_digit(value: u32) -> char { ++ match value { ++ 0 ... 25 => (value as u8 + 'a' as u8) as char, // a..z ++ 26 ... 
35 => (value as u8 - 26 + '0' as u8) as char, // 0..9 ++ _ => panic!() ++ } ++} diff --cc vendor/idna-0.1.4/src/uts46.rs index 000000000,000000000..7115bd8ff new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/src/uts46.rs @@@ -1,0 -1,0 +1,415 @@@ ++// Copyright 2013-2014 The rust-url developers. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++//! [*Unicode IDNA Compatibility Processing* ++//! (Unicode Technical Standard #46)](http://www.unicode.org/reports/tr46/) ++ ++use self::Mapping::*; ++use punycode; ++use std::ascii::AsciiExt; ++use std::cmp::Ordering::{Equal, Less, Greater}; ++use unicode_bidi::{BidiClass, bidi_class}; ++use unicode_normalization::UnicodeNormalization; ++use unicode_normalization::char::is_combining_mark; ++ ++include!("uts46_mapping_table.rs"); ++ ++ ++pub static PUNYCODE_PREFIX: &'static str = "xn--"; ++ ++ ++#[derive(Debug)] ++struct StringTableSlice { ++ // Store these as separate fields so the structure will have an ++ // alignment of 1 and thus pack better into the Mapping enum, below. 
++ byte_start_lo: u8, ++ byte_start_hi: u8, ++ byte_len: u8, ++} ++ ++fn decode_slice(slice: &StringTableSlice) -> &'static str { ++ let lo = slice.byte_start_lo as usize; ++ let hi = slice.byte_start_hi as usize; ++ let start = (hi << 8) | lo; ++ let len = slice.byte_len as usize; ++ &STRING_TABLE[start..(start + len)] ++} ++ ++#[repr(u8)] ++#[derive(Debug)] ++enum Mapping { ++ Valid, ++ Ignored, ++ Mapped(StringTableSlice), ++ Deviation(StringTableSlice), ++ Disallowed, ++ DisallowedStd3Valid, ++ DisallowedStd3Mapped(StringTableSlice), ++} ++ ++struct Range { ++ from: char, ++ to: char, ++ mapping: Mapping, ++} ++ ++fn find_char(codepoint: char) -> &'static Mapping { ++ let r = TABLE.binary_search_by(|ref range| { ++ if codepoint > range.to { ++ Less ++ } else if codepoint < range.from { ++ Greater ++ } else { ++ Equal ++ } ++ }); ++ r.ok().map(|i| &TABLE[i].mapping).unwrap() ++} ++ ++fn map_char(codepoint: char, flags: Flags, output: &mut String, errors: &mut Vec) { ++ match *find_char(codepoint) { ++ Mapping::Valid => output.push(codepoint), ++ Mapping::Ignored => {}, ++ Mapping::Mapped(ref slice) => output.push_str(decode_slice(slice)), ++ Mapping::Deviation(ref slice) => { ++ if flags.transitional_processing { ++ output.push_str(decode_slice(slice)) ++ } else { ++ output.push(codepoint) ++ } ++ } ++ Mapping::Disallowed => { ++ errors.push(Error::DissallowedCharacter); ++ output.push(codepoint); ++ } ++ Mapping::DisallowedStd3Valid => { ++ if flags.use_std3_ascii_rules { ++ errors.push(Error::DissallowedByStd3AsciiRules); ++ } ++ output.push(codepoint) ++ } ++ Mapping::DisallowedStd3Mapped(ref slice) => { ++ if flags.use_std3_ascii_rules { ++ errors.push(Error::DissallowedMappedInStd3); ++ } ++ output.push_str(decode_slice(slice)) ++ } ++ } ++} ++ ++// http://tools.ietf.org/html/rfc5893#section-2 ++fn passes_bidi(label: &str, is_bidi_domain: bool) -> bool { ++ // Rule 0: Bidi Rules apply to Bidi Domain Names: a name with at least one RTL label. 
A label ++ // is RTL if it contains at least one character of bidi class R, AL or AN. ++ if !is_bidi_domain { ++ return true; ++ } ++ ++ let mut chars = label.chars(); ++ let first_char_class = match chars.next() { ++ Some(c) => bidi_class(c), ++ None => return true, // empty string ++ }; ++ ++ match first_char_class { ++ // LTR label ++ BidiClass::L => { ++ // Rule 5 ++ loop { ++ match chars.next() { ++ Some(c) => { ++ if !matches!(bidi_class(c), ++ BidiClass::L | BidiClass::EN | ++ BidiClass::ES | BidiClass::CS | ++ BidiClass::ET | BidiClass::ON | ++ BidiClass::BN | BidiClass::NSM ++ ) { ++ return false; ++ } ++ }, ++ None => { break; }, ++ } ++ } ++ ++ // Rule 6 ++ // must end in L or EN followed by 0 or more NSM ++ let mut rev_chars = label.chars().rev(); ++ let mut last_non_nsm = rev_chars.next(); ++ loop { ++ match last_non_nsm { ++ Some(c) if bidi_class(c) == BidiClass::NSM => { ++ last_non_nsm = rev_chars.next(); ++ continue; ++ } ++ _ => { break; }, ++ } ++ } ++ match last_non_nsm { ++ Some(c) if bidi_class(c) == BidiClass::L ++ || bidi_class(c) == BidiClass::EN => {}, ++ Some(_) => { return false; }, ++ _ => {} ++ } ++ ++ } ++ ++ // RTL label ++ BidiClass::R | BidiClass::AL => { ++ let mut found_en = false; ++ let mut found_an = false; ++ ++ // Rule 2 ++ loop { ++ match chars.next() { ++ Some(c) => { ++ let char_class = bidi_class(c); ++ ++ if char_class == BidiClass::EN { ++ found_en = true; ++ } ++ if char_class == BidiClass::AN { ++ found_an = true; ++ } ++ ++ if !matches!(char_class, BidiClass::R | BidiClass::AL | ++ BidiClass::AN | BidiClass::EN | ++ BidiClass::ES | BidiClass::CS | ++ BidiClass::ET | BidiClass::ON | ++ BidiClass::BN | BidiClass::NSM) { ++ return false; ++ } ++ }, ++ None => { break; }, ++ } ++ } ++ // Rule 3 ++ let mut rev_chars = label.chars().rev(); ++ let mut last = rev_chars.next(); ++ loop { // must end in L or EN followed by 0 or more NSM ++ match last { ++ Some(c) if bidi_class(c) == BidiClass::NSM => { ++ last = 
rev_chars.next(); ++ continue; ++ } ++ _ => { break; }, ++ } ++ } ++ match last { ++ Some(c) if matches!(bidi_class(c), BidiClass::R | BidiClass::AL | ++ BidiClass::EN | BidiClass::AN) => {}, ++ _ => { return false; } ++ } ++ ++ // Rule 4 ++ if found_an && found_en { ++ return false; ++ } ++ } ++ ++ // Rule 1: Should start with L or R/AL ++ _ => { ++ return false; ++ } ++ } ++ ++ return true; ++} ++ ++/// http://www.unicode.org/reports/tr46/#Validity_Criteria ++fn validate(label: &str, is_bidi_domain: bool, flags: Flags, errors: &mut Vec) { ++ let first_char = label.chars().next(); ++ if first_char == None { ++ // Empty string, pass ++ } ++ ++ // V1: Must be in NFC form. ++ else if label.nfc().ne(label.chars()) { ++ errors.push(Error::ValidityCriteria); ++ } ++ ++ // V2: No U+002D HYPHEN-MINUS in both third and fourth positions. ++ // ++ // NOTE: Spec says that the label must not contain a HYPHEN-MINUS character in both the ++ // third and fourth positions. But nobody follows this criteria. See the spec issue below: ++ // https://github.com/whatwg/url/issues/53 ++ // ++ // TODO: Add *CheckHyphens* flag. ++ ++ // V3: neither begin nor end with a U+002D HYPHEN-MINUS ++ else if label.starts_with("-") || label.ends_with("-") { ++ errors.push(Error::ValidityCriteria); ++ } ++ ++ // V4: not contain a U+002E FULL STOP ++ // ++ // Here, label can't contain '.' since the input is from .split('.') ++ ++ // V5: not begin with a GC=Mark ++ else if is_combining_mark(first_char.unwrap()) { ++ errors.push(Error::ValidityCriteria); ++ } ++ ++ // V6: Check against Mapping Table ++ else if label.chars().any(|c| match *find_char(c) { ++ Mapping::Valid => false, ++ Mapping::Deviation(_) => flags.transitional_processing, ++ Mapping::DisallowedStd3Valid => flags.use_std3_ascii_rules, ++ _ => true, ++ }) { ++ errors.push(Error::ValidityCriteria); ++ } ++ ++ // V7: ContextJ rules ++ // ++ // TODO: Implement rules and add *CheckJoiners* flag. 
++ ++ // V8: Bidi rules ++ // ++ // TODO: Add *CheckBidi* flag ++ else if !passes_bidi(label, is_bidi_domain) ++ { ++ errors.push(Error::ValidityCriteria); ++ } ++} ++ ++/// http://www.unicode.org/reports/tr46/#Processing ++fn processing(domain: &str, flags: Flags, errors: &mut Vec) -> String { ++ let mut mapped = String::new(); ++ for c in domain.chars() { ++ map_char(c, flags, &mut mapped, errors) ++ } ++ let normalized: String = mapped.nfc().collect(); ++ ++ // Find out if it's a Bidi Domain Name ++ // ++ // First, check for literal bidi chars ++ let mut is_bidi_domain = domain.chars().any(|c| ++ matches!(bidi_class(c), BidiClass::R | BidiClass::AL | BidiClass::AN) ++ ); ++ if !is_bidi_domain { ++ // Then check for punycode-encoded bidi chars ++ for label in normalized.split('.') { ++ if label.starts_with(PUNYCODE_PREFIX) { ++ match punycode::decode_to_string(&label[PUNYCODE_PREFIX.len()..]) { ++ Some(decoded_label) => { ++ if decoded_label.chars().any(|c| ++ matches!(bidi_class(c), BidiClass::R | BidiClass::AL | BidiClass::AN) ++ ) { ++ is_bidi_domain = true; ++ } ++ } ++ None => { ++ is_bidi_domain = true; ++ } ++ } ++ } ++ } ++ } ++ ++ let mut validated = String::new(); ++ let mut first = true; ++ for label in normalized.split('.') { ++ if !first { ++ validated.push('.'); ++ } ++ first = false; ++ if label.starts_with(PUNYCODE_PREFIX) { ++ match punycode::decode_to_string(&label[PUNYCODE_PREFIX.len()..]) { ++ Some(decoded_label) => { ++ let flags = Flags { transitional_processing: false, ..flags }; ++ validate(&decoded_label, is_bidi_domain, flags, errors); ++ validated.push_str(&decoded_label) ++ } ++ None => errors.push(Error::PunycodeError) ++ } ++ } else { ++ validate(label, is_bidi_domain, flags, errors); ++ validated.push_str(label) ++ } ++ } ++ validated ++} ++ ++#[derive(Copy, Clone)] ++pub struct Flags { ++ pub use_std3_ascii_rules: bool, ++ pub transitional_processing: bool, ++ pub verify_dns_length: bool, ++} ++ ++#[derive(PartialEq, Eq, Clone, 
Copy, Debug)] ++enum Error { ++ PunycodeError, ++ ValidityCriteria, ++ DissallowedByStd3AsciiRules, ++ DissallowedMappedInStd3, ++ DissallowedCharacter, ++ TooLongForDns, ++ TooShortForDns, ++} ++ ++/// Errors recorded during UTS #46 processing. ++/// ++/// This is opaque for now, only indicating the presence of at least one error. ++/// More details may be exposed in the future. ++#[derive(Debug)] ++pub struct Errors(Vec); ++ ++/// http://www.unicode.org/reports/tr46/#ToASCII ++pub fn to_ascii(domain: &str, flags: Flags) -> Result { ++ let mut errors = Vec::new(); ++ let mut result = String::new(); ++ let mut first = true; ++ for label in processing(domain, flags, &mut errors).split('.') { ++ if !first { ++ result.push('.'); ++ } ++ first = false; ++ if label.is_ascii() { ++ result.push_str(label); ++ } else { ++ match punycode::encode_str(label) { ++ Some(x) => { ++ result.push_str(PUNYCODE_PREFIX); ++ result.push_str(&x); ++ }, ++ None => errors.push(Error::PunycodeError) ++ } ++ } ++ } ++ ++ if flags.verify_dns_length { ++ let domain = if result.ends_with(".") { &result[..result.len()-1] } else { &*result }; ++ if domain.len() < 1 || domain.split('.').any(|label| label.len() < 1) { ++ errors.push(Error::TooShortForDns) ++ } ++ if domain.len() > 253 || domain.split('.').any(|label| label.len() > 63) { ++ errors.push(Error::TooLongForDns) ++ } ++ } ++ if errors.is_empty() { ++ Ok(result) ++ } else { ++ Err(Errors(errors)) ++ } ++} ++ ++/// http://www.unicode.org/reports/tr46/#ToUnicode ++/// ++/// Only `use_std3_ascii_rules` is used in `flags`. 
++pub fn to_unicode(domain: &str, mut flags: Flags) -> (String, Result<(), Errors>) { ++ flags.transitional_processing = false; ++ let mut errors = Vec::new(); ++ let domain = processing(domain, flags, &mut errors); ++ let errors = if errors.is_empty() { ++ Ok(()) ++ } else { ++ Err(Errors(errors)) ++ }; ++ (domain, errors) ++} diff --cc vendor/idna-0.1.4/src/uts46_mapping_table.rs index 000000000,000000000..4801e6a44 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/src/uts46_mapping_table.rs @@@ -1,0 -1,0 +1,12844 @@@ ++// Copyright 2013-2014 The rust-url developers. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++// Generated by make_idna_table.py ++ ++static TABLE: &'static [Range] = &[ ++ ++ Range { from: '\u{0}', to: '\u{2c}', mapping: DisallowedStd3Valid }, ++ Range { from: '\u{2d}', to: '\u{2e}', mapping: Valid }, ++ Range { from: '\u{2f}', to: '\u{2f}', mapping: DisallowedStd3Valid }, ++ Range { from: '\u{30}', to: '\u{39}', mapping: Valid }, ++ Range { from: '\u{3a}', to: '\u{40}', mapping: DisallowedStd3Valid }, ++ Range { from: '\u{41}', to: '\u{41}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{42}', to: '\u{42}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{43}', to: '\u{43}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{44}', to: '\u{44}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{45}', to: '\u{45}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{46}', to: '\u{46}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: 
'\u{47}', to: '\u{47}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{48}', to: '\u{48}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{49}', to: '\u{49}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{4a}', to: '\u{4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{4b}', to: '\u{4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{4c}', to: '\u{4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{4d}', to: '\u{4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{4e}', to: '\u{4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{4f}', to: '\u{4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{50}', to: '\u{50}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{51}', to: '\u{51}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{52}', to: '\u{52}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{53}', to: '\u{53}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{54}', to: '\u{54}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{55}', to: '\u{55}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{56}', to: '\u{56}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{57}', to: '\u{57}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{58}', to: '\u{58}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{59}', to: '\u{59}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{5a}', to: '\u{5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{5b}', to: '\u{60}', mapping: DisallowedStd3Valid }, ++ Range { from: '\u{61}', to: '\u{7a}', mapping: Valid }, ++ Range { from: '\u{7b}', to: '\u{7f}', mapping: DisallowedStd3Valid }, ++ Range { from: '\u{80}', to: '\u{9f}', mapping: Disallowed }, ++ Range { from: '\u{a0}', to: '\u{a0}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{a1}', to: '\u{a7}', mapping: Valid }, ++ Range { from: '\u{a8}', to: '\u{a8}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 0, byte_len: 3 }) }, ++ Range { from: '\u{a9}', to: '\u{a9}', mapping: Valid }, ++ Range { from: '\u{aa}', to: '\u{aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ab}', to: '\u{ac}', mapping: Valid }, ++ Range { from: '\u{ad}', to: '\u{ad}', mapping: Ignored }, ++ Range { from: '\u{ae}', to: '\u{ae}', mapping: Valid }, ++ Range { from: '\u{af}', to: '\u{af}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 0, byte_len: 3 }) }, ++ Range { from: '\u{b0}', to: '\u{b1}', mapping: Valid }, ++ Range { from: '\u{b2}', to: '\u{b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{b3}', to: '\u{b3}', mapping: Mapped(StringTableSlice 
{ byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{b4}', to: '\u{b4}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 0, byte_len: 3 }) }, ++ Range { from: '\u{b5}', to: '\u{b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{b6}', to: '\u{b7}', mapping: Valid }, ++ Range { from: '\u{b8}', to: '\u{b8}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 0, byte_len: 3 }) }, ++ Range { from: '\u{b9}', to: '\u{b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ba}', to: '\u{ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{bb}', to: '\u{bb}', mapping: Valid }, ++ Range { from: '\u{bc}', to: '\u{bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 0, byte_len: 5 }) }, ++ Range { from: '\u{bd}', to: '\u{bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 0, byte_len: 5 }) }, ++ Range { from: '\u{be}', to: '\u{be}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 0, byte_len: 5 }) }, ++ Range { from: '\u{bf}', to: '\u{bf}', mapping: Valid }, ++ Range { from: '\u{c0}', to: '\u{c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{c1}', to: '\u{c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{c2}', to: '\u{c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{c3}', to: '\u{c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{c4}', to: '\u{c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{c5}', to: 
'\u{c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{c6}', to: '\u{c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{c7}', to: '\u{c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{c8}', to: '\u{c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{c9}', to: '\u{c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{ca}', to: '\u{ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{cb}', to: '\u{cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{cc}', to: '\u{cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{cd}', to: '\u{cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{ce}', to: '\u{ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{cf}', to: '\u{cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{d0}', to: '\u{d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{d1}', to: '\u{d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{d2}', to: '\u{d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{d3}', to: '\u{d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{d4}', to: '\u{d4}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{d5}', to: '\u{d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{d6}', to: '\u{d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{d7}', to: '\u{d7}', mapping: Valid }, ++ Range { from: '\u{d8}', to: '\u{d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{d9}', to: '\u{d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{da}', to: '\u{da}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{db}', to: '\u{db}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{dc}', to: '\u{dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{dd}', to: '\u{dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{de}', to: '\u{de}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{df}', to: '\u{df}', mapping: Deviation(StringTableSlice { byte_start_lo: 119, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{e0}', to: '\u{ff}', mapping: Valid }, ++ Range { from: '\u{100}', to: '\u{100}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{101}', to: '\u{101}', mapping: Valid }, ++ Range { from: '\u{102}', to: '\u{102}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{103}', to: '\u{103}', mapping: Valid }, ++ Range { from: '\u{104}', to: '\u{104}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 125, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{105}', to: '\u{105}', mapping: Valid }, ++ Range { from: '\u{106}', to: '\u{106}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{107}', to: '\u{107}', mapping: Valid }, ++ Range { from: '\u{108}', to: '\u{108}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{109}', to: '\u{109}', mapping: Valid }, ++ Range { from: '\u{10a}', to: '\u{10a}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{10b}', to: '\u{10b}', mapping: Valid }, ++ Range { from: '\u{10c}', to: '\u{10c}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{10d}', to: '\u{10d}', mapping: Valid }, ++ Range { from: '\u{10e}', to: '\u{10e}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{10f}', to: '\u{10f}', mapping: Valid }, ++ Range { from: '\u{110}', to: '\u{110}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{111}', to: '\u{111}', mapping: Valid }, ++ Range { from: '\u{112}', to: '\u{112}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{113}', to: '\u{113}', mapping: Valid }, ++ Range { from: '\u{114}', to: '\u{114}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{115}', to: '\u{115}', mapping: Valid }, ++ Range { from: '\u{116}', to: '\u{116}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{117}', to: '\u{117}', mapping: Valid }, ++ Range { from: '\u{118}', to: '\u{118}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 0, byte_len: 2 
}) }, ++ Range { from: '\u{119}', to: '\u{119}', mapping: Valid }, ++ Range { from: '\u{11a}', to: '\u{11a}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{11b}', to: '\u{11b}', mapping: Valid }, ++ Range { from: '\u{11c}', to: '\u{11c}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{11d}', to: '\u{11d}', mapping: Valid }, ++ Range { from: '\u{11e}', to: '\u{11e}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{11f}', to: '\u{11f}', mapping: Valid }, ++ Range { from: '\u{120}', to: '\u{120}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{121}', to: '\u{121}', mapping: Valid }, ++ Range { from: '\u{122}', to: '\u{122}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{123}', to: '\u{123}', mapping: Valid }, ++ Range { from: '\u{124}', to: '\u{124}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{125}', to: '\u{125}', mapping: Valid }, ++ Range { from: '\u{126}', to: '\u{126}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{127}', to: '\u{127}', mapping: Valid }, ++ Range { from: '\u{128}', to: '\u{128}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{129}', to: '\u{129}', mapping: Valid }, ++ Range { from: '\u{12a}', to: '\u{12a}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{12b}', to: '\u{12b}', mapping: Valid }, ++ Range { from: '\u{12c}', to: '\u{12c}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{12d}', to: '\u{12d}', 
mapping: Valid }, ++ Range { from: '\u{12e}', to: '\u{12e}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{12f}', to: '\u{12f}', mapping: Valid }, ++ Range { from: '\u{130}', to: '\u{130}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 0, byte_len: 3 }) }, ++ Range { from: '\u{131}', to: '\u{131}', mapping: Valid }, ++ Range { from: '\u{132}', to: '\u{133}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{134}', to: '\u{134}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{135}', to: '\u{135}', mapping: Valid }, ++ Range { from: '\u{136}', to: '\u{136}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{137}', to: '\u{138}', mapping: Valid }, ++ Range { from: '\u{139}', to: '\u{139}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{13a}', to: '\u{13a}', mapping: Valid }, ++ Range { from: '\u{13b}', to: '\u{13b}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{13c}', to: '\u{13c}', mapping: Valid }, ++ Range { from: '\u{13d}', to: '\u{13d}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{13e}', to: '\u{13e}', mapping: Valid }, ++ Range { from: '\u{13f}', to: '\u{140}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 0, byte_len: 3 }) }, ++ Range { from: '\u{141}', to: '\u{141}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{142}', to: '\u{142}', mapping: Valid }, ++ Range { from: '\u{143}', to: '\u{143}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: 
'\u{144}', to: '\u{144}', mapping: Valid }, ++ Range { from: '\u{145}', to: '\u{145}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{146}', to: '\u{146}', mapping: Valid }, ++ Range { from: '\u{147}', to: '\u{147}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{148}', to: '\u{148}', mapping: Valid }, ++ Range { from: '\u{149}', to: '\u{149}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 0, byte_len: 3 }) }, ++ Range { from: '\u{14a}', to: '\u{14a}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{14b}', to: '\u{14b}', mapping: Valid }, ++ Range { from: '\u{14c}', to: '\u{14c}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{14d}', to: '\u{14d}', mapping: Valid }, ++ Range { from: '\u{14e}', to: '\u{14e}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{14f}', to: '\u{14f}', mapping: Valid }, ++ Range { from: '\u{150}', to: '\u{150}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{151}', to: '\u{151}', mapping: Valid }, ++ Range { from: '\u{152}', to: '\u{152}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{153}', to: '\u{153}', mapping: Valid }, ++ Range { from: '\u{154}', to: '\u{154}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{155}', to: '\u{155}', mapping: Valid }, ++ Range { from: '\u{156}', to: '\u{156}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{157}', to: '\u{157}', mapping: Valid }, ++ Range { from: '\u{158}', to: '\u{158}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{159}', to: '\u{159}', mapping: Valid }, ++ Range { from: '\u{15a}', to: '\u{15a}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{15b}', to: '\u{15b}', mapping: Valid }, ++ Range { from: '\u{15c}', to: '\u{15c}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{15d}', to: '\u{15d}', mapping: Valid }, ++ Range { from: '\u{15e}', to: '\u{15e}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{15f}', to: '\u{15f}', mapping: Valid }, ++ Range { from: '\u{160}', to: '\u{160}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{161}', to: '\u{161}', mapping: Valid }, ++ Range { from: '\u{162}', to: '\u{162}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{163}', to: '\u{163}', mapping: Valid }, ++ Range { from: '\u{164}', to: '\u{164}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{165}', to: '\u{165}', mapping: Valid }, ++ Range { from: '\u{166}', to: '\u{166}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{167}', to: '\u{167}', mapping: Valid }, ++ Range { from: '\u{168}', to: '\u{168}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{169}', to: '\u{169}', mapping: Valid }, ++ Range { from: '\u{16a}', to: '\u{16a}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{16b}', to: '\u{16b}', mapping: Valid }, ++ Range { from: '\u{16c}', to: '\u{16c}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, 
byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{16d}', to: '\u{16d}', mapping: Valid }, ++ Range { from: '\u{16e}', to: '\u{16e}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{16f}', to: '\u{16f}', mapping: Valid }, ++ Range { from: '\u{170}', to: '\u{170}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{171}', to: '\u{171}', mapping: Valid }, ++ Range { from: '\u{172}', to: '\u{172}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{173}', to: '\u{173}', mapping: Valid }, ++ Range { from: '\u{174}', to: '\u{174}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{175}', to: '\u{175}', mapping: Valid }, ++ Range { from: '\u{176}', to: '\u{176}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{177}', to: '\u{177}', mapping: Valid }, ++ Range { from: '\u{178}', to: '\u{178}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{179}', to: '\u{179}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{17a}', to: '\u{17a}', mapping: Valid }, ++ Range { from: '\u{17b}', to: '\u{17b}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{17c}', to: '\u{17c}', mapping: Valid }, ++ Range { from: '\u{17d}', to: '\u{17d}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{17e}', to: '\u{17e}', mapping: Valid }, ++ Range { from: '\u{17f}', to: '\u{17f}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{180}', to: '\u{180}', mapping: Valid }, ++ Range { 
from: '\u{181}', to: '\u{181}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{182}', to: '\u{182}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{183}', to: '\u{183}', mapping: Valid }, ++ Range { from: '\u{184}', to: '\u{184}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{185}', to: '\u{185}', mapping: Valid }, ++ Range { from: '\u{186}', to: '\u{186}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{187}', to: '\u{187}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{188}', to: '\u{188}', mapping: Valid }, ++ Range { from: '\u{189}', to: '\u{189}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{18a}', to: '\u{18a}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{18b}', to: '\u{18b}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{18c}', to: '\u{18d}', mapping: Valid }, ++ Range { from: '\u{18e}', to: '\u{18e}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{18f}', to: '\u{18f}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{190}', to: '\u{190}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{191}', to: '\u{191}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{192}', to: '\u{192}', mapping: Valid }, ++ Range { from: '\u{193}', to: '\u{193}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 
1, byte_len: 2 }) }, ++ Range { from: '\u{194}', to: '\u{194}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{195}', to: '\u{195}', mapping: Valid }, ++ Range { from: '\u{196}', to: '\u{196}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{197}', to: '\u{197}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{198}', to: '\u{198}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{199}', to: '\u{19b}', mapping: Valid }, ++ Range { from: '\u{19c}', to: '\u{19c}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{19d}', to: '\u{19d}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{19e}', to: '\u{19e}', mapping: Valid }, ++ Range { from: '\u{19f}', to: '\u{19f}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1a0}', to: '\u{1a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1a1}', to: '\u{1a1}', mapping: Valid }, ++ Range { from: '\u{1a2}', to: '\u{1a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1a3}', to: '\u{1a3}', mapping: Valid }, ++ Range { from: '\u{1a4}', to: '\u{1a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1a5}', to: '\u{1a5}', mapping: Valid }, ++ Range { from: '\u{1a6}', to: '\u{1a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1a7}', to: '\u{1a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: 
'\u{1a8}', to: '\u{1a8}', mapping: Valid }, ++ Range { from: '\u{1a9}', to: '\u{1a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1aa}', to: '\u{1ab}', mapping: Valid }, ++ Range { from: '\u{1ac}', to: '\u{1ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1ad}', to: '\u{1ad}', mapping: Valid }, ++ Range { from: '\u{1ae}', to: '\u{1ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1af}', to: '\u{1af}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1b0}', to: '\u{1b0}', mapping: Valid }, ++ Range { from: '\u{1b1}', to: '\u{1b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1b2}', to: '\u{1b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1b3}', to: '\u{1b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1b4}', to: '\u{1b4}', mapping: Valid }, ++ Range { from: '\u{1b5}', to: '\u{1b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1b6}', to: '\u{1b6}', mapping: Valid }, ++ Range { from: '\u{1b7}', to: '\u{1b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1b8}', to: '\u{1b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1b9}', to: '\u{1bb}', mapping: Valid }, ++ Range { from: '\u{1bc}', to: '\u{1bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1bd}', to: '\u{1c3}', mapping: Valid }, ++ Range { from: '\u{1c4}', to: '\u{1c6}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 1, byte_len: 3 }) }, ++ Range { from: '\u{1c7}', to: '\u{1c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1ca}', to: '\u{1cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1cd}', to: '\u{1cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1ce}', to: '\u{1ce}', mapping: Valid }, ++ Range { from: '\u{1cf}', to: '\u{1cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d0}', to: '\u{1d0}', mapping: Valid }, ++ Range { from: '\u{1d1}', to: '\u{1d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d2}', to: '\u{1d2}', mapping: Valid }, ++ Range { from: '\u{1d3}', to: '\u{1d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d4}', to: '\u{1d4}', mapping: Valid }, ++ Range { from: '\u{1d5}', to: '\u{1d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d6}', to: '\u{1d6}', mapping: Valid }, ++ Range { from: '\u{1d7}', to: '\u{1d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d8}', to: '\u{1d8}', mapping: Valid }, ++ Range { from: '\u{1d9}', to: '\u{1d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1da}', to: '\u{1da}', mapping: Valid }, ++ Range { from: '\u{1db}', to: '\u{1db}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1dc}', to: '\u{1dd}', mapping: Valid }, ++ Range { from: '\u{1de}', to: '\u{1de}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, 
byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1df}', to: '\u{1df}', mapping: Valid }, ++ Range { from: '\u{1e0}', to: '\u{1e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1e1}', to: '\u{1e1}', mapping: Valid }, ++ Range { from: '\u{1e2}', to: '\u{1e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1e3}', to: '\u{1e3}', mapping: Valid }, ++ Range { from: '\u{1e4}', to: '\u{1e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1e5}', to: '\u{1e5}', mapping: Valid }, ++ Range { from: '\u{1e6}', to: '\u{1e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1e7}', to: '\u{1e7}', mapping: Valid }, ++ Range { from: '\u{1e8}', to: '\u{1e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1e9}', to: '\u{1e9}', mapping: Valid }, ++ Range { from: '\u{1ea}', to: '\u{1ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1eb}', to: '\u{1eb}', mapping: Valid }, ++ Range { from: '\u{1ec}', to: '\u{1ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1ed}', to: '\u{1ed}', mapping: Valid }, ++ Range { from: '\u{1ee}', to: '\u{1ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1ef}', to: '\u{1f0}', mapping: Valid }, ++ Range { from: '\u{1f1}', to: '\u{1f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1f4}', to: '\u{1f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1f5}', to: '\u{1f5}', mapping: Valid }, ++ Range { from: 
'\u{1f6}', to: '\u{1f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1f7}', to: '\u{1f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1f8}', to: '\u{1f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1f9}', to: '\u{1f9}', mapping: Valid }, ++ Range { from: '\u{1fa}', to: '\u{1fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1fb}', to: '\u{1fb}', mapping: Valid }, ++ Range { from: '\u{1fc}', to: '\u{1fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1fd}', to: '\u{1fd}', mapping: Valid }, ++ Range { from: '\u{1fe}', to: '\u{1fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1ff}', to: '\u{1ff}', mapping: Valid }, ++ Range { from: '\u{200}', to: '\u{200}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{201}', to: '\u{201}', mapping: Valid }, ++ Range { from: '\u{202}', to: '\u{202}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{203}', to: '\u{203}', mapping: Valid }, ++ Range { from: '\u{204}', to: '\u{204}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{205}', to: '\u{205}', mapping: Valid }, ++ Range { from: '\u{206}', to: '\u{206}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{207}', to: '\u{207}', mapping: Valid }, ++ Range { from: '\u{208}', to: '\u{208}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{209}', to: '\u{209}', mapping: 
Valid }, ++ Range { from: '\u{20a}', to: '\u{20a}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{20b}', to: '\u{20b}', mapping: Valid }, ++ Range { from: '\u{20c}', to: '\u{20c}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{20d}', to: '\u{20d}', mapping: Valid }, ++ Range { from: '\u{20e}', to: '\u{20e}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{20f}', to: '\u{20f}', mapping: Valid }, ++ Range { from: '\u{210}', to: '\u{210}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{211}', to: '\u{211}', mapping: Valid }, ++ Range { from: '\u{212}', to: '\u{212}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{213}', to: '\u{213}', mapping: Valid }, ++ Range { from: '\u{214}', to: '\u{214}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{215}', to: '\u{215}', mapping: Valid }, ++ Range { from: '\u{216}', to: '\u{216}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{217}', to: '\u{217}', mapping: Valid }, ++ Range { from: '\u{218}', to: '\u{218}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{219}', to: '\u{219}', mapping: Valid }, ++ Range { from: '\u{21a}', to: '\u{21a}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{21b}', to: '\u{21b}', mapping: Valid }, ++ Range { from: '\u{21c}', to: '\u{21c}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{21d}', to: '\u{21d}', mapping: Valid }, ++ Range { from: '\u{21e}', to: 
'\u{21e}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{21f}', to: '\u{21f}', mapping: Valid }, ++ Range { from: '\u{220}', to: '\u{220}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{221}', to: '\u{221}', mapping: Valid }, ++ Range { from: '\u{222}', to: '\u{222}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{223}', to: '\u{223}', mapping: Valid }, ++ Range { from: '\u{224}', to: '\u{224}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{225}', to: '\u{225}', mapping: Valid }, ++ Range { from: '\u{226}', to: '\u{226}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{227}', to: '\u{227}', mapping: Valid }, ++ Range { from: '\u{228}', to: '\u{228}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{229}', to: '\u{229}', mapping: Valid }, ++ Range { from: '\u{22a}', to: '\u{22a}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{22b}', to: '\u{22b}', mapping: Valid }, ++ Range { from: '\u{22c}', to: '\u{22c}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{22d}', to: '\u{22d}', mapping: Valid }, ++ Range { from: '\u{22e}', to: '\u{22e}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{22f}', to: '\u{22f}', mapping: Valid }, ++ Range { from: '\u{230}', to: '\u{230}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{231}', to: '\u{231}', mapping: Valid }, ++ Range { from: '\u{232}', to: '\u{232}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 175, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{233}', to: '\u{239}', mapping: Valid }, ++ Range { from: '\u{23a}', to: '\u{23a}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 1, byte_len: 3 }) }, ++ Range { from: '\u{23b}', to: '\u{23b}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{23c}', to: '\u{23c}', mapping: Valid }, ++ Range { from: '\u{23d}', to: '\u{23d}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{23e}', to: '\u{23e}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 1, byte_len: 3 }) }, ++ Range { from: '\u{23f}', to: '\u{240}', mapping: Valid }, ++ Range { from: '\u{241}', to: '\u{241}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{242}', to: '\u{242}', mapping: Valid }, ++ Range { from: '\u{243}', to: '\u{243}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{244}', to: '\u{244}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{245}', to: '\u{245}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{246}', to: '\u{246}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{247}', to: '\u{247}', mapping: Valid }, ++ Range { from: '\u{248}', to: '\u{248}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{249}', to: '\u{249}', mapping: Valid }, ++ Range { from: '\u{24a}', to: '\u{24a}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{24b}', to: '\u{24b}', mapping: Valid }, ++ Range { from: '\u{24c}', to: '\u{24c}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{24d}', to: '\u{24d}', mapping: Valid }, ++ Range { from: '\u{24e}', to: '\u{24e}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{24f}', to: '\u{2af}', mapping: Valid }, ++ Range { from: '\u{2b0}', to: '\u{2b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2b1}', to: '\u{2b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{2b2}', to: '\u{2b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2b3}', to: '\u{2b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2b4}', to: '\u{2b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{2b5}', to: '\u{2b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{2b6}', to: '\u{2b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{2b7}', to: '\u{2b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2b8}', to: '\u{2b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2b9}', to: '\u{2d7}', mapping: Valid }, ++ Range { from: '\u{2d8}', to: '\u{2d8}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 1, byte_len: 3 }) }, ++ Range { from: '\u{2d9}', to: '\u{2d9}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 1, byte_len: 3 }) }, ++ Range { from: '\u{2da}', to: '\u{2da}', mapping: DisallowedStd3Mapped(StringTableSlice { 
byte_start_lo: 219, byte_start_hi: 1, byte_len: 3 }) }, ++ Range { from: '\u{2db}', to: '\u{2db}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 1, byte_len: 3 }) }, ++ Range { from: '\u{2dc}', to: '\u{2dc}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 1, byte_len: 3 }) }, ++ Range { from: '\u{2dd}', to: '\u{2dd}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 1, byte_len: 3 }) }, ++ Range { from: '\u{2de}', to: '\u{2df}', mapping: Valid }, ++ Range { from: '\u{2e0}', to: '\u{2e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{2e1}', to: '\u{2e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2e2}', to: '\u{2e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2e3}', to: '\u{2e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2e4}', to: '\u{2e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{2e5}', to: '\u{33f}', mapping: Valid }, ++ Range { from: '\u{340}', to: '\u{340}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{341}', to: '\u{341}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{342}', to: '\u{342}', mapping: Valid }, ++ Range { from: '\u{343}', to: '\u{343}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{344}', to: '\u{344}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 1, byte_len: 4 }) }, ++ Range { from: '\u{345}', to: '\u{345}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 
1, byte_len: 2 }) }, ++ Range { from: '\u{346}', to: '\u{34e}', mapping: Valid }, ++ Range { from: '\u{34f}', to: '\u{34f}', mapping: Ignored }, ++ Range { from: '\u{350}', to: '\u{36f}', mapping: Valid }, ++ Range { from: '\u{370}', to: '\u{370}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{371}', to: '\u{371}', mapping: Valid }, ++ Range { from: '\u{372}', to: '\u{372}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{373}', to: '\u{373}', mapping: Valid }, ++ Range { from: '\u{374}', to: '\u{374}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{375}', to: '\u{375}', mapping: Valid }, ++ Range { from: '\u{376}', to: '\u{376}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{377}', to: '\u{377}', mapping: Valid }, ++ Range { from: '\u{378}', to: '\u{379}', mapping: Disallowed }, ++ Range { from: '\u{37a}', to: '\u{37a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 1, byte_len: 3 }) }, ++ Range { from: '\u{37b}', to: '\u{37d}', mapping: Valid }, ++ Range { from: '\u{37e}', to: '\u{37e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }) }, ++ Range { from: '\u{37f}', to: '\u{37f}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{380}', to: '\u{383}', mapping: Disallowed }, ++ Range { from: '\u{384}', to: '\u{384}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 0, byte_len: 3 }) }, ++ Range { from: '\u{385}', to: '\u{385}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 2, byte_len: 5 }) }, ++ Range { from: '\u{386}', to: '\u{386}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, 
byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{387}', to: '\u{387}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{388}', to: '\u{388}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{389}', to: '\u{389}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{38a}', to: '\u{38a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{38b}', to: '\u{38b}', mapping: Disallowed }, ++ Range { from: '\u{38c}', to: '\u{38c}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{38d}', to: '\u{38d}', mapping: Disallowed }, ++ Range { from: '\u{38e}', to: '\u{38e}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{38f}', to: '\u{38f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{390}', to: '\u{390}', mapping: Valid }, ++ Range { from: '\u{391}', to: '\u{391}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{392}', to: '\u{392}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{393}', to: '\u{393}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{394}', to: '\u{394}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{395}', to: '\u{395}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{396}', to: '\u{396}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{397}', to: 
'\u{397}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{398}', to: '\u{398}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{399}', to: '\u{399}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{39a}', to: '\u{39a}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{39b}', to: '\u{39b}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{39c}', to: '\u{39c}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{39d}', to: '\u{39d}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{39e}', to: '\u{39e}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{39f}', to: '\u{39f}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3a0}', to: '\u{3a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3a1}', to: '\u{3a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3a2}', to: '\u{3a2}', mapping: Disallowed }, ++ Range { from: '\u{3a3}', to: '\u{3a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3a4}', to: '\u{3a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3a5}', to: '\u{3a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3a6}', to: '\u{3a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, 
byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3a7}', to: '\u{3a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3a8}', to: '\u{3a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3a9}', to: '\u{3a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3aa}', to: '\u{3aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3ab}', to: '\u{3ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3ac}', to: '\u{3c1}', mapping: Valid }, ++ Range { from: '\u{3c2}', to: '\u{3c2}', mapping: Deviation(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3c3}', to: '\u{3ce}', mapping: Valid }, ++ Range { from: '\u{3cf}', to: '\u{3cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3d0}', to: '\u{3d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3d1}', to: '\u{3d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3d2}', to: '\u{3d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3d3}', to: '\u{3d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3d4}', to: '\u{3d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3d5}', to: '\u{3d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3d6}', to: '\u{3d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, 
byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3d7}', to: '\u{3d7}', mapping: Valid }, ++ Range { from: '\u{3d8}', to: '\u{3d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3d9}', to: '\u{3d9}', mapping: Valid }, ++ Range { from: '\u{3da}', to: '\u{3da}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3db}', to: '\u{3db}', mapping: Valid }, ++ Range { from: '\u{3dc}', to: '\u{3dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3dd}', to: '\u{3dd}', mapping: Valid }, ++ Range { from: '\u{3de}', to: '\u{3de}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3df}', to: '\u{3df}', mapping: Valid }, ++ Range { from: '\u{3e0}', to: '\u{3e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3e1}', to: '\u{3e1}', mapping: Valid }, ++ Range { from: '\u{3e2}', to: '\u{3e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3e3}', to: '\u{3e3}', mapping: Valid }, ++ Range { from: '\u{3e4}', to: '\u{3e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3e5}', to: '\u{3e5}', mapping: Valid }, ++ Range { from: '\u{3e6}', to: '\u{3e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3e7}', to: '\u{3e7}', mapping: Valid }, ++ Range { from: '\u{3e8}', to: '\u{3e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3e9}', to: '\u{3e9}', mapping: Valid }, ++ Range { from: '\u{3ea}', to: '\u{3ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: 
'\u{3eb}', to: '\u{3eb}', mapping: Valid }, ++ Range { from: '\u{3ec}', to: '\u{3ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3ed}', to: '\u{3ed}', mapping: Valid }, ++ Range { from: '\u{3ee}', to: '\u{3ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3ef}', to: '\u{3ef}', mapping: Valid }, ++ Range { from: '\u{3f0}', to: '\u{3f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3f1}', to: '\u{3f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3f2}', to: '\u{3f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3f3}', to: '\u{3f3}', mapping: Valid }, ++ Range { from: '\u{3f4}', to: '\u{3f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3f5}', to: '\u{3f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3f6}', to: '\u{3f6}', mapping: Valid }, ++ Range { from: '\u{3f7}', to: '\u{3f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3f8}', to: '\u{3f8}', mapping: Valid }, ++ Range { from: '\u{3f9}', to: '\u{3f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3fa}', to: '\u{3fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3fb}', to: '\u{3fc}', mapping: Valid }, ++ Range { from: '\u{3fd}', to: '\u{3fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{3fe}', to: '\u{3fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 2, 
byte_len: 2 }) }, ++ Range { from: '\u{3ff}', to: '\u{3ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{400}', to: '\u{400}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{401}', to: '\u{401}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{402}', to: '\u{402}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{403}', to: '\u{403}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{404}', to: '\u{404}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{405}', to: '\u{405}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{406}', to: '\u{406}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{407}', to: '\u{407}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{408}', to: '\u{408}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{409}', to: '\u{409}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{40a}', to: '\u{40a}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{40b}', to: '\u{40b}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{40c}', to: '\u{40c}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{40d}', to: '\u{40d}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, 
byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{40e}', to: '\u{40e}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{40f}', to: '\u{40f}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{410}', to: '\u{410}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{411}', to: '\u{411}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{412}', to: '\u{412}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{413}', to: '\u{413}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{414}', to: '\u{414}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{415}', to: '\u{415}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{416}', to: '\u{416}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{417}', to: '\u{417}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{418}', to: '\u{418}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{419}', to: '\u{419}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{41a}', to: '\u{41a}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{41b}', to: '\u{41b}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{41c}', to: '\u{41c}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 164, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{41d}', to: '\u{41d}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{41e}', to: '\u{41e}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{41f}', to: '\u{41f}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{420}', to: '\u{420}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{421}', to: '\u{421}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{422}', to: '\u{422}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{423}', to: '\u{423}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{424}', to: '\u{424}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{425}', to: '\u{425}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{426}', to: '\u{426}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{427}', to: '\u{427}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{428}', to: '\u{428}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{429}', to: '\u{429}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{42a}', to: '\u{42a}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{42b}', to: '\u{42b}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{42c}', to: '\u{42c}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{42d}', to: '\u{42d}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{42e}', to: '\u{42e}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{42f}', to: '\u{42f}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{430}', to: '\u{45f}', mapping: Valid }, ++ Range { from: '\u{460}', to: '\u{460}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{461}', to: '\u{461}', mapping: Valid }, ++ Range { from: '\u{462}', to: '\u{462}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{463}', to: '\u{463}', mapping: Valid }, ++ Range { from: '\u{464}', to: '\u{464}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{465}', to: '\u{465}', mapping: Valid }, ++ Range { from: '\u{466}', to: '\u{466}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{467}', to: '\u{467}', mapping: Valid }, ++ Range { from: '\u{468}', to: '\u{468}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{469}', to: '\u{469}', mapping: Valid }, ++ Range { from: '\u{46a}', to: '\u{46a}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{46b}', to: '\u{46b}', mapping: Valid }, ++ Range { from: '\u{46c}', to: '\u{46c}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: 
'\u{46d}', to: '\u{46d}', mapping: Valid }, ++ Range { from: '\u{46e}', to: '\u{46e}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{46f}', to: '\u{46f}', mapping: Valid }, ++ Range { from: '\u{470}', to: '\u{470}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{471}', to: '\u{471}', mapping: Valid }, ++ Range { from: '\u{472}', to: '\u{472}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{473}', to: '\u{473}', mapping: Valid }, ++ Range { from: '\u{474}', to: '\u{474}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{475}', to: '\u{475}', mapping: Valid }, ++ Range { from: '\u{476}', to: '\u{476}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{477}', to: '\u{477}', mapping: Valid }, ++ Range { from: '\u{478}', to: '\u{478}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{479}', to: '\u{479}', mapping: Valid }, ++ Range { from: '\u{47a}', to: '\u{47a}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{47b}', to: '\u{47b}', mapping: Valid }, ++ Range { from: '\u{47c}', to: '\u{47c}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{47d}', to: '\u{47d}', mapping: Valid }, ++ Range { from: '\u{47e}', to: '\u{47e}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{47f}', to: '\u{47f}', mapping: Valid }, ++ Range { from: '\u{480}', to: '\u{480}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{481}', to: '\u{489}', mapping: Valid }, ++ 
Range { from: '\u{48a}', to: '\u{48a}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{48b}', to: '\u{48b}', mapping: Valid }, ++ Range { from: '\u{48c}', to: '\u{48c}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{48d}', to: '\u{48d}', mapping: Valid }, ++ Range { from: '\u{48e}', to: '\u{48e}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{48f}', to: '\u{48f}', mapping: Valid }, ++ Range { from: '\u{490}', to: '\u{490}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{491}', to: '\u{491}', mapping: Valid }, ++ Range { from: '\u{492}', to: '\u{492}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{493}', to: '\u{493}', mapping: Valid }, ++ Range { from: '\u{494}', to: '\u{494}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{495}', to: '\u{495}', mapping: Valid }, ++ Range { from: '\u{496}', to: '\u{496}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{497}', to: '\u{497}', mapping: Valid }, ++ Range { from: '\u{498}', to: '\u{498}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{499}', to: '\u{499}', mapping: Valid }, ++ Range { from: '\u{49a}', to: '\u{49a}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{49b}', to: '\u{49b}', mapping: Valid }, ++ Range { from: '\u{49c}', to: '\u{49c}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{49d}', to: '\u{49d}', mapping: Valid }, ++ Range { from: '\u{49e}', to: '\u{49e}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{49f}', to: '\u{49f}', mapping: Valid }, ++ Range { from: '\u{4a0}', to: '\u{4a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4a1}', to: '\u{4a1}', mapping: Valid }, ++ Range { from: '\u{4a2}', to: '\u{4a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4a3}', to: '\u{4a3}', mapping: Valid }, ++ Range { from: '\u{4a4}', to: '\u{4a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4a5}', to: '\u{4a5}', mapping: Valid }, ++ Range { from: '\u{4a6}', to: '\u{4a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4a7}', to: '\u{4a7}', mapping: Valid }, ++ Range { from: '\u{4a8}', to: '\u{4a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4a9}', to: '\u{4a9}', mapping: Valid }, ++ Range { from: '\u{4aa}', to: '\u{4aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4ab}', to: '\u{4ab}', mapping: Valid }, ++ Range { from: '\u{4ac}', to: '\u{4ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4ad}', to: '\u{4ad}', mapping: Valid }, ++ Range { from: '\u{4ae}', to: '\u{4ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4af}', to: '\u{4af}', mapping: Valid }, ++ Range { from: '\u{4b0}', to: '\u{4b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4b1}', to: '\u{4b1}', mapping: Valid }, ++ Range { from: '\u{4b2}', to: '\u{4b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 3, 
byte_len: 2 }) }, ++ Range { from: '\u{4b3}', to: '\u{4b3}', mapping: Valid }, ++ Range { from: '\u{4b4}', to: '\u{4b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4b5}', to: '\u{4b5}', mapping: Valid }, ++ Range { from: '\u{4b6}', to: '\u{4b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4b7}', to: '\u{4b7}', mapping: Valid }, ++ Range { from: '\u{4b8}', to: '\u{4b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4b9}', to: '\u{4b9}', mapping: Valid }, ++ Range { from: '\u{4ba}', to: '\u{4ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4bb}', to: '\u{4bb}', mapping: Valid }, ++ Range { from: '\u{4bc}', to: '\u{4bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4bd}', to: '\u{4bd}', mapping: Valid }, ++ Range { from: '\u{4be}', to: '\u{4be}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4bf}', to: '\u{4bf}', mapping: Valid }, ++ Range { from: '\u{4c0}', to: '\u{4c0}', mapping: Disallowed }, ++ Range { from: '\u{4c1}', to: '\u{4c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4c2}', to: '\u{4c2}', mapping: Valid }, ++ Range { from: '\u{4c3}', to: '\u{4c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4c4}', to: '\u{4c4}', mapping: Valid }, ++ Range { from: '\u{4c5}', to: '\u{4c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4c6}', to: '\u{4c6}', mapping: Valid }, ++ Range { from: '\u{4c7}', to: '\u{4c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, 
byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4c8}', to: '\u{4c8}', mapping: Valid }, ++ Range { from: '\u{4c9}', to: '\u{4c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4ca}', to: '\u{4ca}', mapping: Valid }, ++ Range { from: '\u{4cb}', to: '\u{4cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4cc}', to: '\u{4cc}', mapping: Valid }, ++ Range { from: '\u{4cd}', to: '\u{4cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4ce}', to: '\u{4cf}', mapping: Valid }, ++ Range { from: '\u{4d0}', to: '\u{4d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4d1}', to: '\u{4d1}', mapping: Valid }, ++ Range { from: '\u{4d2}', to: '\u{4d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4d3}', to: '\u{4d3}', mapping: Valid }, ++ Range { from: '\u{4d4}', to: '\u{4d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4d5}', to: '\u{4d5}', mapping: Valid }, ++ Range { from: '\u{4d6}', to: '\u{4d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4d7}', to: '\u{4d7}', mapping: Valid }, ++ Range { from: '\u{4d8}', to: '\u{4d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4d9}', to: '\u{4d9}', mapping: Valid }, ++ Range { from: '\u{4da}', to: '\u{4da}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4db}', to: '\u{4db}', mapping: Valid }, ++ Range { from: '\u{4dc}', to: '\u{4dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: 
'\u{4dd}', to: '\u{4dd}', mapping: Valid }, ++ Range { from: '\u{4de}', to: '\u{4de}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4df}', to: '\u{4df}', mapping: Valid }, ++ Range { from: '\u{4e0}', to: '\u{4e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4e1}', to: '\u{4e1}', mapping: Valid }, ++ Range { from: '\u{4e2}', to: '\u{4e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4e3}', to: '\u{4e3}', mapping: Valid }, ++ Range { from: '\u{4e4}', to: '\u{4e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4e5}', to: '\u{4e5}', mapping: Valid }, ++ Range { from: '\u{4e6}', to: '\u{4e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4e7}', to: '\u{4e7}', mapping: Valid }, ++ Range { from: '\u{4e8}', to: '\u{4e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4e9}', to: '\u{4e9}', mapping: Valid }, ++ Range { from: '\u{4ea}', to: '\u{4ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4eb}', to: '\u{4eb}', mapping: Valid }, ++ Range { from: '\u{4ec}', to: '\u{4ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4ed}', to: '\u{4ed}', mapping: Valid }, ++ Range { from: '\u{4ee}', to: '\u{4ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4ef}', to: '\u{4ef}', mapping: Valid }, ++ Range { from: '\u{4f0}', to: '\u{4f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4f1}', to: '\u{4f1}', mapping: Valid }, ++ Range { 
from: '\u{4f2}', to: '\u{4f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4f3}', to: '\u{4f3}', mapping: Valid }, ++ Range { from: '\u{4f4}', to: '\u{4f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4f5}', to: '\u{4f5}', mapping: Valid }, ++ Range { from: '\u{4f6}', to: '\u{4f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4f7}', to: '\u{4f7}', mapping: Valid }, ++ Range { from: '\u{4f8}', to: '\u{4f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4f9}', to: '\u{4f9}', mapping: Valid }, ++ Range { from: '\u{4fa}', to: '\u{4fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4fb}', to: '\u{4fb}', mapping: Valid }, ++ Range { from: '\u{4fc}', to: '\u{4fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4fd}', to: '\u{4fd}', mapping: Valid }, ++ Range { from: '\u{4fe}', to: '\u{4fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{4ff}', to: '\u{4ff}', mapping: Valid }, ++ Range { from: '\u{500}', to: '\u{500}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{501}', to: '\u{501}', mapping: Valid }, ++ Range { from: '\u{502}', to: '\u{502}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{503}', to: '\u{503}', mapping: Valid }, ++ Range { from: '\u{504}', to: '\u{504}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{505}', to: '\u{505}', mapping: Valid }, ++ Range { from: '\u{506}', to: '\u{506}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{507}', to: '\u{507}', mapping: Valid }, ++ Range { from: '\u{508}', to: '\u{508}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{509}', to: '\u{509}', mapping: Valid }, ++ Range { from: '\u{50a}', to: '\u{50a}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{50b}', to: '\u{50b}', mapping: Valid }, ++ Range { from: '\u{50c}', to: '\u{50c}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{50d}', to: '\u{50d}', mapping: Valid }, ++ Range { from: '\u{50e}', to: '\u{50e}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{50f}', to: '\u{50f}', mapping: Valid }, ++ Range { from: '\u{510}', to: '\u{510}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{511}', to: '\u{511}', mapping: Valid }, ++ Range { from: '\u{512}', to: '\u{512}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{513}', to: '\u{513}', mapping: Valid }, ++ Range { from: '\u{514}', to: '\u{514}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{515}', to: '\u{515}', mapping: Valid }, ++ Range { from: '\u{516}', to: '\u{516}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{517}', to: '\u{517}', mapping: Valid }, ++ Range { from: '\u{518}', to: '\u{518}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{519}', to: '\u{519}', mapping: Valid }, ++ Range { from: '\u{51a}', to: '\u{51a}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, 
byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{51b}', to: '\u{51b}', mapping: Valid }, ++ Range { from: '\u{51c}', to: '\u{51c}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{51d}', to: '\u{51d}', mapping: Valid }, ++ Range { from: '\u{51e}', to: '\u{51e}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{51f}', to: '\u{51f}', mapping: Valid }, ++ Range { from: '\u{520}', to: '\u{520}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{521}', to: '\u{521}', mapping: Valid }, ++ Range { from: '\u{522}', to: '\u{522}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{523}', to: '\u{523}', mapping: Valid }, ++ Range { from: '\u{524}', to: '\u{524}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{525}', to: '\u{525}', mapping: Valid }, ++ Range { from: '\u{526}', to: '\u{526}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{527}', to: '\u{527}', mapping: Valid }, ++ Range { from: '\u{528}', to: '\u{528}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{529}', to: '\u{529}', mapping: Valid }, ++ Range { from: '\u{52a}', to: '\u{52a}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{52b}', to: '\u{52b}', mapping: Valid }, ++ Range { from: '\u{52c}', to: '\u{52c}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{52d}', to: '\u{52d}', mapping: Valid }, ++ Range { from: '\u{52e}', to: '\u{52e}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { 
from: '\u{52f}', to: '\u{52f}', mapping: Valid }, ++ Range { from: '\u{530}', to: '\u{530}', mapping: Disallowed }, ++ Range { from: '\u{531}', to: '\u{531}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{532}', to: '\u{532}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{533}', to: '\u{533}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{534}', to: '\u{534}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{535}', to: '\u{535}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{536}', to: '\u{536}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{537}', to: '\u{537}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{538}', to: '\u{538}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{539}', to: '\u{539}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{53a}', to: '\u{53a}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{53b}', to: '\u{53b}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{53c}', to: '\u{53c}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{53d}', to: '\u{53d}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{53e}', to: '\u{53e}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 3, byte_len: 2 }) }, ++ 
Range { from: '\u{53f}', to: '\u{53f}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{540}', to: '\u{540}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{541}', to: '\u{541}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{542}', to: '\u{542}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{543}', to: '\u{543}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{544}', to: '\u{544}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{545}', to: '\u{545}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{546}', to: '\u{546}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{547}', to: '\u{547}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{548}', to: '\u{548}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{549}', to: '\u{549}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{54a}', to: '\u{54a}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{54b}', to: '\u{54b}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{54c}', to: '\u{54c}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{54d}', to: '\u{54d}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 3, byte_len: 
2 }) }, ++ Range { from: '\u{54e}', to: '\u{54e}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{54f}', to: '\u{54f}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{550}', to: '\u{550}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{551}', to: '\u{551}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{552}', to: '\u{552}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{553}', to: '\u{553}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{554}', to: '\u{554}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{555}', to: '\u{555}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{556}', to: '\u{556}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 3, byte_len: 2 }) }, ++ Range { from: '\u{557}', to: '\u{558}', mapping: Disallowed }, ++ Range { from: '\u{559}', to: '\u{55f}', mapping: Valid }, ++ Range { from: '\u{560}', to: '\u{560}', mapping: Disallowed }, ++ Range { from: '\u{561}', to: '\u{586}', mapping: Valid }, ++ Range { from: '\u{587}', to: '\u{587}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 3, byte_len: 4 }) }, ++ Range { from: '\u{588}', to: '\u{588}', mapping: Disallowed }, ++ Range { from: '\u{589}', to: '\u{58a}', mapping: Valid }, ++ Range { from: '\u{58b}', to: '\u{58c}', mapping: Disallowed }, ++ Range { from: '\u{58d}', to: '\u{58f}', mapping: Valid }, ++ Range { from: '\u{590}', to: '\u{590}', mapping: Disallowed }, ++ Range { from: '\u{591}', to: '\u{5c7}', mapping: Valid }, ++ Range { from: 
'\u{5c8}', to: '\u{5cf}', mapping: Disallowed }, ++ Range { from: '\u{5d0}', to: '\u{5ea}', mapping: Valid }, ++ Range { from: '\u{5eb}', to: '\u{5ef}', mapping: Disallowed }, ++ Range { from: '\u{5f0}', to: '\u{5f4}', mapping: Valid }, ++ Range { from: '\u{5f5}', to: '\u{605}', mapping: Disallowed }, ++ Range { from: '\u{606}', to: '\u{61b}', mapping: Valid }, ++ Range { from: '\u{61c}', to: '\u{61d}', mapping: Disallowed }, ++ Range { from: '\u{61e}', to: '\u{674}', mapping: Valid }, ++ Range { from: '\u{675}', to: '\u{675}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 3, byte_len: 4 }) }, ++ Range { from: '\u{676}', to: '\u{676}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 3, byte_len: 4 }) }, ++ Range { from: '\u{677}', to: '\u{677}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 3, byte_len: 4 }) }, ++ Range { from: '\u{678}', to: '\u{678}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 3, byte_len: 4 }) }, ++ Range { from: '\u{679}', to: '\u{6dc}', mapping: Valid }, ++ Range { from: '\u{6dd}', to: '\u{6dd}', mapping: Disallowed }, ++ Range { from: '\u{6de}', to: '\u{70d}', mapping: Valid }, ++ Range { from: '\u{70e}', to: '\u{70f}', mapping: Disallowed }, ++ Range { from: '\u{710}', to: '\u{74a}', mapping: Valid }, ++ Range { from: '\u{74b}', to: '\u{74c}', mapping: Disallowed }, ++ Range { from: '\u{74d}', to: '\u{7b1}', mapping: Valid }, ++ Range { from: '\u{7b2}', to: '\u{7bf}', mapping: Disallowed }, ++ Range { from: '\u{7c0}', to: '\u{7fa}', mapping: Valid }, ++ Range { from: '\u{7fb}', to: '\u{7ff}', mapping: Disallowed }, ++ Range { from: '\u{800}', to: '\u{82d}', mapping: Valid }, ++ Range { from: '\u{82e}', to: '\u{82f}', mapping: Disallowed }, ++ Range { from: '\u{830}', to: '\u{83e}', mapping: Valid }, ++ Range { from: '\u{83f}', to: '\u{83f}', mapping: Disallowed }, ++ Range { from: '\u{840}', to: '\u{85b}', mapping: Valid }, ++ Range { from: 
'\u{85c}', to: '\u{85d}', mapping: Disallowed }, ++ Range { from: '\u{85e}', to: '\u{85e}', mapping: Valid }, ++ Range { from: '\u{85f}', to: '\u{85f}', mapping: Disallowed }, ++ Range { from: '\u{860}', to: '\u{86a}', mapping: Valid }, ++ Range { from: '\u{86b}', to: '\u{89f}', mapping: Disallowed }, ++ Range { from: '\u{8a0}', to: '\u{8b4}', mapping: Valid }, ++ Range { from: '\u{8b5}', to: '\u{8b5}', mapping: Disallowed }, ++ Range { from: '\u{8b6}', to: '\u{8bd}', mapping: Valid }, ++ Range { from: '\u{8be}', to: '\u{8d3}', mapping: Disallowed }, ++ Range { from: '\u{8d4}', to: '\u{8e1}', mapping: Valid }, ++ Range { from: '\u{8e2}', to: '\u{8e2}', mapping: Disallowed }, ++ Range { from: '\u{8e3}', to: '\u{957}', mapping: Valid }, ++ Range { from: '\u{958}', to: '\u{958}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 3, byte_len: 6 }) }, ++ Range { from: '\u{959}', to: '\u{959}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 3, byte_len: 6 }) }, ++ Range { from: '\u{95a}', to: '\u{95a}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 3, byte_len: 6 }) }, ++ Range { from: '\u{95b}', to: '\u{95b}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{95c}', to: '\u{95c}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{95d}', to: '\u{95d}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{95e}', to: '\u{95e}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{95f}', to: '\u{95f}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{960}', to: '\u{983}', mapping: Valid }, ++ Range { from: '\u{984}', to: '\u{984}', mapping: Disallowed }, ++ Range { from: '\u{985}', to: '\u{98c}', mapping: Valid }, 
++ Range { from: '\u{98d}', to: '\u{98e}', mapping: Disallowed }, ++ Range { from: '\u{98f}', to: '\u{990}', mapping: Valid }, ++ Range { from: '\u{991}', to: '\u{992}', mapping: Disallowed }, ++ Range { from: '\u{993}', to: '\u{9a8}', mapping: Valid }, ++ Range { from: '\u{9a9}', to: '\u{9a9}', mapping: Disallowed }, ++ Range { from: '\u{9aa}', to: '\u{9b0}', mapping: Valid }, ++ Range { from: '\u{9b1}', to: '\u{9b1}', mapping: Disallowed }, ++ Range { from: '\u{9b2}', to: '\u{9b2}', mapping: Valid }, ++ Range { from: '\u{9b3}', to: '\u{9b5}', mapping: Disallowed }, ++ Range { from: '\u{9b6}', to: '\u{9b9}', mapping: Valid }, ++ Range { from: '\u{9ba}', to: '\u{9bb}', mapping: Disallowed }, ++ Range { from: '\u{9bc}', to: '\u{9c4}', mapping: Valid }, ++ Range { from: '\u{9c5}', to: '\u{9c6}', mapping: Disallowed }, ++ Range { from: '\u{9c7}', to: '\u{9c8}', mapping: Valid }, ++ Range { from: '\u{9c9}', to: '\u{9ca}', mapping: Disallowed }, ++ Range { from: '\u{9cb}', to: '\u{9ce}', mapping: Valid }, ++ Range { from: '\u{9cf}', to: '\u{9d6}', mapping: Disallowed }, ++ Range { from: '\u{9d7}', to: '\u{9d7}', mapping: Valid }, ++ Range { from: '\u{9d8}', to: '\u{9db}', mapping: Disallowed }, ++ Range { from: '\u{9dc}', to: '\u{9dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{9dd}', to: '\u{9dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{9de}', to: '\u{9de}', mapping: Disallowed }, ++ Range { from: '\u{9df}', to: '\u{9df}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{9e0}', to: '\u{9e3}', mapping: Valid }, ++ Range { from: '\u{9e4}', to: '\u{9e5}', mapping: Disallowed }, ++ Range { from: '\u{9e6}', to: '\u{9fd}', mapping: Valid }, ++ Range { from: '\u{9fe}', to: '\u{a00}', mapping: Disallowed }, ++ Range { from: '\u{a01}', to: '\u{a03}', mapping: Valid }, ++ Range { 
from: '\u{a04}', to: '\u{a04}', mapping: Disallowed }, ++ Range { from: '\u{a05}', to: '\u{a0a}', mapping: Valid }, ++ Range { from: '\u{a0b}', to: '\u{a0e}', mapping: Disallowed }, ++ Range { from: '\u{a0f}', to: '\u{a10}', mapping: Valid }, ++ Range { from: '\u{a11}', to: '\u{a12}', mapping: Disallowed }, ++ Range { from: '\u{a13}', to: '\u{a28}', mapping: Valid }, ++ Range { from: '\u{a29}', to: '\u{a29}', mapping: Disallowed }, ++ Range { from: '\u{a2a}', to: '\u{a30}', mapping: Valid }, ++ Range { from: '\u{a31}', to: '\u{a31}', mapping: Disallowed }, ++ Range { from: '\u{a32}', to: '\u{a32}', mapping: Valid }, ++ Range { from: '\u{a33}', to: '\u{a33}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{a34}', to: '\u{a34}', mapping: Disallowed }, ++ Range { from: '\u{a35}', to: '\u{a35}', mapping: Valid }, ++ Range { from: '\u{a36}', to: '\u{a36}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{a37}', to: '\u{a37}', mapping: Disallowed }, ++ Range { from: '\u{a38}', to: '\u{a39}', mapping: Valid }, ++ Range { from: '\u{a3a}', to: '\u{a3b}', mapping: Disallowed }, ++ Range { from: '\u{a3c}', to: '\u{a3c}', mapping: Valid }, ++ Range { from: '\u{a3d}', to: '\u{a3d}', mapping: Disallowed }, ++ Range { from: '\u{a3e}', to: '\u{a42}', mapping: Valid }, ++ Range { from: '\u{a43}', to: '\u{a46}', mapping: Disallowed }, ++ Range { from: '\u{a47}', to: '\u{a48}', mapping: Valid }, ++ Range { from: '\u{a49}', to: '\u{a4a}', mapping: Disallowed }, ++ Range { from: '\u{a4b}', to: '\u{a4d}', mapping: Valid }, ++ Range { from: '\u{a4e}', to: '\u{a50}', mapping: Disallowed }, ++ Range { from: '\u{a51}', to: '\u{a51}', mapping: Valid }, ++ Range { from: '\u{a52}', to: '\u{a58}', mapping: Disallowed }, ++ Range { from: '\u{a59}', to: '\u{a59}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: 
'\u{a5a}', to: '\u{a5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{a5b}', to: '\u{a5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{a5c}', to: '\u{a5c}', mapping: Valid }, ++ Range { from: '\u{a5d}', to: '\u{a5d}', mapping: Disallowed }, ++ Range { from: '\u{a5e}', to: '\u{a5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{a5f}', to: '\u{a65}', mapping: Disallowed }, ++ Range { from: '\u{a66}', to: '\u{a75}', mapping: Valid }, ++ Range { from: '\u{a76}', to: '\u{a80}', mapping: Disallowed }, ++ Range { from: '\u{a81}', to: '\u{a83}', mapping: Valid }, ++ Range { from: '\u{a84}', to: '\u{a84}', mapping: Disallowed }, ++ Range { from: '\u{a85}', to: '\u{a8d}', mapping: Valid }, ++ Range { from: '\u{a8e}', to: '\u{a8e}', mapping: Disallowed }, ++ Range { from: '\u{a8f}', to: '\u{a91}', mapping: Valid }, ++ Range { from: '\u{a92}', to: '\u{a92}', mapping: Disallowed }, ++ Range { from: '\u{a93}', to: '\u{aa8}', mapping: Valid }, ++ Range { from: '\u{aa9}', to: '\u{aa9}', mapping: Disallowed }, ++ Range { from: '\u{aaa}', to: '\u{ab0}', mapping: Valid }, ++ Range { from: '\u{ab1}', to: '\u{ab1}', mapping: Disallowed }, ++ Range { from: '\u{ab2}', to: '\u{ab3}', mapping: Valid }, ++ Range { from: '\u{ab4}', to: '\u{ab4}', mapping: Disallowed }, ++ Range { from: '\u{ab5}', to: '\u{ab9}', mapping: Valid }, ++ Range { from: '\u{aba}', to: '\u{abb}', mapping: Disallowed }, ++ Range { from: '\u{abc}', to: '\u{ac5}', mapping: Valid }, ++ Range { from: '\u{ac6}', to: '\u{ac6}', mapping: Disallowed }, ++ Range { from: '\u{ac7}', to: '\u{ac9}', mapping: Valid }, ++ Range { from: '\u{aca}', to: '\u{aca}', mapping: Disallowed }, ++ Range { from: '\u{acb}', to: '\u{acd}', mapping: Valid }, ++ Range { from: '\u{ace}', to: '\u{acf}', mapping: Disallowed }, ++ Range { from: '\u{ad0}', 
to: '\u{ad0}', mapping: Valid }, ++ Range { from: '\u{ad1}', to: '\u{adf}', mapping: Disallowed }, ++ Range { from: '\u{ae0}', to: '\u{ae3}', mapping: Valid }, ++ Range { from: '\u{ae4}', to: '\u{ae5}', mapping: Disallowed }, ++ Range { from: '\u{ae6}', to: '\u{af1}', mapping: Valid }, ++ Range { from: '\u{af2}', to: '\u{af8}', mapping: Disallowed }, ++ Range { from: '\u{af9}', to: '\u{aff}', mapping: Valid }, ++ Range { from: '\u{b00}', to: '\u{b00}', mapping: Disallowed }, ++ Range { from: '\u{b01}', to: '\u{b03}', mapping: Valid }, ++ Range { from: '\u{b04}', to: '\u{b04}', mapping: Disallowed }, ++ Range { from: '\u{b05}', to: '\u{b0c}', mapping: Valid }, ++ Range { from: '\u{b0d}', to: '\u{b0e}', mapping: Disallowed }, ++ Range { from: '\u{b0f}', to: '\u{b10}', mapping: Valid }, ++ Range { from: '\u{b11}', to: '\u{b12}', mapping: Disallowed }, ++ Range { from: '\u{b13}', to: '\u{b28}', mapping: Valid }, ++ Range { from: '\u{b29}', to: '\u{b29}', mapping: Disallowed }, ++ Range { from: '\u{b2a}', to: '\u{b30}', mapping: Valid }, ++ Range { from: '\u{b31}', to: '\u{b31}', mapping: Disallowed }, ++ Range { from: '\u{b32}', to: '\u{b33}', mapping: Valid }, ++ Range { from: '\u{b34}', to: '\u{b34}', mapping: Disallowed }, ++ Range { from: '\u{b35}', to: '\u{b39}', mapping: Valid }, ++ Range { from: '\u{b3a}', to: '\u{b3b}', mapping: Disallowed }, ++ Range { from: '\u{b3c}', to: '\u{b44}', mapping: Valid }, ++ Range { from: '\u{b45}', to: '\u{b46}', mapping: Disallowed }, ++ Range { from: '\u{b47}', to: '\u{b48}', mapping: Valid }, ++ Range { from: '\u{b49}', to: '\u{b4a}', mapping: Disallowed }, ++ Range { from: '\u{b4b}', to: '\u{b4d}', mapping: Valid }, ++ Range { from: '\u{b4e}', to: '\u{b55}', mapping: Disallowed }, ++ Range { from: '\u{b56}', to: '\u{b57}', mapping: Valid }, ++ Range { from: '\u{b58}', to: '\u{b5b}', mapping: Disallowed }, ++ Range { from: '\u{b5c}', to: '\u{b5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 4, 
byte_len: 6 }) }, ++ Range { from: '\u{b5d}', to: '\u{b5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{b5e}', to: '\u{b5e}', mapping: Disallowed }, ++ Range { from: '\u{b5f}', to: '\u{b63}', mapping: Valid }, ++ Range { from: '\u{b64}', to: '\u{b65}', mapping: Disallowed }, ++ Range { from: '\u{b66}', to: '\u{b77}', mapping: Valid }, ++ Range { from: '\u{b78}', to: '\u{b81}', mapping: Disallowed }, ++ Range { from: '\u{b82}', to: '\u{b83}', mapping: Valid }, ++ Range { from: '\u{b84}', to: '\u{b84}', mapping: Disallowed }, ++ Range { from: '\u{b85}', to: '\u{b8a}', mapping: Valid }, ++ Range { from: '\u{b8b}', to: '\u{b8d}', mapping: Disallowed }, ++ Range { from: '\u{b8e}', to: '\u{b90}', mapping: Valid }, ++ Range { from: '\u{b91}', to: '\u{b91}', mapping: Disallowed }, ++ Range { from: '\u{b92}', to: '\u{b95}', mapping: Valid }, ++ Range { from: '\u{b96}', to: '\u{b98}', mapping: Disallowed }, ++ Range { from: '\u{b99}', to: '\u{b9a}', mapping: Valid }, ++ Range { from: '\u{b9b}', to: '\u{b9b}', mapping: Disallowed }, ++ Range { from: '\u{b9c}', to: '\u{b9c}', mapping: Valid }, ++ Range { from: '\u{b9d}', to: '\u{b9d}', mapping: Disallowed }, ++ Range { from: '\u{b9e}', to: '\u{b9f}', mapping: Valid }, ++ Range { from: '\u{ba0}', to: '\u{ba2}', mapping: Disallowed }, ++ Range { from: '\u{ba3}', to: '\u{ba4}', mapping: Valid }, ++ Range { from: '\u{ba5}', to: '\u{ba7}', mapping: Disallowed }, ++ Range { from: '\u{ba8}', to: '\u{baa}', mapping: Valid }, ++ Range { from: '\u{bab}', to: '\u{bad}', mapping: Disallowed }, ++ Range { from: '\u{bae}', to: '\u{bb9}', mapping: Valid }, ++ Range { from: '\u{bba}', to: '\u{bbd}', mapping: Disallowed }, ++ Range { from: '\u{bbe}', to: '\u{bc2}', mapping: Valid }, ++ Range { from: '\u{bc3}', to: '\u{bc5}', mapping: Disallowed }, ++ Range { from: '\u{bc6}', to: '\u{bc8}', mapping: Valid }, ++ Range { from: '\u{bc9}', to: '\u{bc9}', mapping: Disallowed }, ++ 
Range { from: '\u{bca}', to: '\u{bcd}', mapping: Valid }, ++ Range { from: '\u{bce}', to: '\u{bcf}', mapping: Disallowed }, ++ Range { from: '\u{bd0}', to: '\u{bd0}', mapping: Valid }, ++ Range { from: '\u{bd1}', to: '\u{bd6}', mapping: Disallowed }, ++ Range { from: '\u{bd7}', to: '\u{bd7}', mapping: Valid }, ++ Range { from: '\u{bd8}', to: '\u{be5}', mapping: Disallowed }, ++ Range { from: '\u{be6}', to: '\u{bfa}', mapping: Valid }, ++ Range { from: '\u{bfb}', to: '\u{bff}', mapping: Disallowed }, ++ Range { from: '\u{c00}', to: '\u{c03}', mapping: Valid }, ++ Range { from: '\u{c04}', to: '\u{c04}', mapping: Disallowed }, ++ Range { from: '\u{c05}', to: '\u{c0c}', mapping: Valid }, ++ Range { from: '\u{c0d}', to: '\u{c0d}', mapping: Disallowed }, ++ Range { from: '\u{c0e}', to: '\u{c10}', mapping: Valid }, ++ Range { from: '\u{c11}', to: '\u{c11}', mapping: Disallowed }, ++ Range { from: '\u{c12}', to: '\u{c28}', mapping: Valid }, ++ Range { from: '\u{c29}', to: '\u{c29}', mapping: Disallowed }, ++ Range { from: '\u{c2a}', to: '\u{c39}', mapping: Valid }, ++ Range { from: '\u{c3a}', to: '\u{c3c}', mapping: Disallowed }, ++ Range { from: '\u{c3d}', to: '\u{c44}', mapping: Valid }, ++ Range { from: '\u{c45}', to: '\u{c45}', mapping: Disallowed }, ++ Range { from: '\u{c46}', to: '\u{c48}', mapping: Valid }, ++ Range { from: '\u{c49}', to: '\u{c49}', mapping: Disallowed }, ++ Range { from: '\u{c4a}', to: '\u{c4d}', mapping: Valid }, ++ Range { from: '\u{c4e}', to: '\u{c54}', mapping: Disallowed }, ++ Range { from: '\u{c55}', to: '\u{c56}', mapping: Valid }, ++ Range { from: '\u{c57}', to: '\u{c57}', mapping: Disallowed }, ++ Range { from: '\u{c58}', to: '\u{c5a}', mapping: Valid }, ++ Range { from: '\u{c5b}', to: '\u{c5f}', mapping: Disallowed }, ++ Range { from: '\u{c60}', to: '\u{c63}', mapping: Valid }, ++ Range { from: '\u{c64}', to: '\u{c65}', mapping: Disallowed }, ++ Range { from: '\u{c66}', to: '\u{c6f}', mapping: Valid }, ++ Range { from: '\u{c70}', to: 
'\u{c77}', mapping: Disallowed }, ++ Range { from: '\u{c78}', to: '\u{c83}', mapping: Valid }, ++ Range { from: '\u{c84}', to: '\u{c84}', mapping: Disallowed }, ++ Range { from: '\u{c85}', to: '\u{c8c}', mapping: Valid }, ++ Range { from: '\u{c8d}', to: '\u{c8d}', mapping: Disallowed }, ++ Range { from: '\u{c8e}', to: '\u{c90}', mapping: Valid }, ++ Range { from: '\u{c91}', to: '\u{c91}', mapping: Disallowed }, ++ Range { from: '\u{c92}', to: '\u{ca8}', mapping: Valid }, ++ Range { from: '\u{ca9}', to: '\u{ca9}', mapping: Disallowed }, ++ Range { from: '\u{caa}', to: '\u{cb3}', mapping: Valid }, ++ Range { from: '\u{cb4}', to: '\u{cb4}', mapping: Disallowed }, ++ Range { from: '\u{cb5}', to: '\u{cb9}', mapping: Valid }, ++ Range { from: '\u{cba}', to: '\u{cbb}', mapping: Disallowed }, ++ Range { from: '\u{cbc}', to: '\u{cc4}', mapping: Valid }, ++ Range { from: '\u{cc5}', to: '\u{cc5}', mapping: Disallowed }, ++ Range { from: '\u{cc6}', to: '\u{cc8}', mapping: Valid }, ++ Range { from: '\u{cc9}', to: '\u{cc9}', mapping: Disallowed }, ++ Range { from: '\u{cca}', to: '\u{ccd}', mapping: Valid }, ++ Range { from: '\u{cce}', to: '\u{cd4}', mapping: Disallowed }, ++ Range { from: '\u{cd5}', to: '\u{cd6}', mapping: Valid }, ++ Range { from: '\u{cd7}', to: '\u{cdd}', mapping: Disallowed }, ++ Range { from: '\u{cde}', to: '\u{cde}', mapping: Valid }, ++ Range { from: '\u{cdf}', to: '\u{cdf}', mapping: Disallowed }, ++ Range { from: '\u{ce0}', to: '\u{ce3}', mapping: Valid }, ++ Range { from: '\u{ce4}', to: '\u{ce5}', mapping: Disallowed }, ++ Range { from: '\u{ce6}', to: '\u{cef}', mapping: Valid }, ++ Range { from: '\u{cf0}', to: '\u{cf0}', mapping: Disallowed }, ++ Range { from: '\u{cf1}', to: '\u{cf2}', mapping: Valid }, ++ Range { from: '\u{cf3}', to: '\u{cff}', mapping: Disallowed }, ++ Range { from: '\u{d00}', to: '\u{d03}', mapping: Valid }, ++ Range { from: '\u{d04}', to: '\u{d04}', mapping: Disallowed }, ++ Range { from: '\u{d05}', to: '\u{d0c}', mapping: Valid }, 
++ Range { from: '\u{d0d}', to: '\u{d0d}', mapping: Disallowed }, ++ Range { from: '\u{d0e}', to: '\u{d10}', mapping: Valid }, ++ Range { from: '\u{d11}', to: '\u{d11}', mapping: Disallowed }, ++ Range { from: '\u{d12}', to: '\u{d44}', mapping: Valid }, ++ Range { from: '\u{d45}', to: '\u{d45}', mapping: Disallowed }, ++ Range { from: '\u{d46}', to: '\u{d48}', mapping: Valid }, ++ Range { from: '\u{d49}', to: '\u{d49}', mapping: Disallowed }, ++ Range { from: '\u{d4a}', to: '\u{d4f}', mapping: Valid }, ++ Range { from: '\u{d50}', to: '\u{d53}', mapping: Disallowed }, ++ Range { from: '\u{d54}', to: '\u{d63}', mapping: Valid }, ++ Range { from: '\u{d64}', to: '\u{d65}', mapping: Disallowed }, ++ Range { from: '\u{d66}', to: '\u{d7f}', mapping: Valid }, ++ Range { from: '\u{d80}', to: '\u{d81}', mapping: Disallowed }, ++ Range { from: '\u{d82}', to: '\u{d83}', mapping: Valid }, ++ Range { from: '\u{d84}', to: '\u{d84}', mapping: Disallowed }, ++ Range { from: '\u{d85}', to: '\u{d96}', mapping: Valid }, ++ Range { from: '\u{d97}', to: '\u{d99}', mapping: Disallowed }, ++ Range { from: '\u{d9a}', to: '\u{db1}', mapping: Valid }, ++ Range { from: '\u{db2}', to: '\u{db2}', mapping: Disallowed }, ++ Range { from: '\u{db3}', to: '\u{dbb}', mapping: Valid }, ++ Range { from: '\u{dbc}', to: '\u{dbc}', mapping: Disallowed }, ++ Range { from: '\u{dbd}', to: '\u{dbd}', mapping: Valid }, ++ Range { from: '\u{dbe}', to: '\u{dbf}', mapping: Disallowed }, ++ Range { from: '\u{dc0}', to: '\u{dc6}', mapping: Valid }, ++ Range { from: '\u{dc7}', to: '\u{dc9}', mapping: Disallowed }, ++ Range { from: '\u{dca}', to: '\u{dca}', mapping: Valid }, ++ Range { from: '\u{dcb}', to: '\u{dce}', mapping: Disallowed }, ++ Range { from: '\u{dcf}', to: '\u{dd4}', mapping: Valid }, ++ Range { from: '\u{dd5}', to: '\u{dd5}', mapping: Disallowed }, ++ Range { from: '\u{dd6}', to: '\u{dd6}', mapping: Valid }, ++ Range { from: '\u{dd7}', to: '\u{dd7}', mapping: Disallowed }, ++ Range { from: '\u{dd8}', 
to: '\u{ddf}', mapping: Valid }, ++ Range { from: '\u{de0}', to: '\u{de5}', mapping: Disallowed }, ++ Range { from: '\u{de6}', to: '\u{def}', mapping: Valid }, ++ Range { from: '\u{df0}', to: '\u{df1}', mapping: Disallowed }, ++ Range { from: '\u{df2}', to: '\u{df4}', mapping: Valid }, ++ Range { from: '\u{df5}', to: '\u{e00}', mapping: Disallowed }, ++ Range { from: '\u{e01}', to: '\u{e32}', mapping: Valid }, ++ Range { from: '\u{e33}', to: '\u{e33}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{e34}', to: '\u{e3a}', mapping: Valid }, ++ Range { from: '\u{e3b}', to: '\u{e3e}', mapping: Disallowed }, ++ Range { from: '\u{e3f}', to: '\u{e5b}', mapping: Valid }, ++ Range { from: '\u{e5c}', to: '\u{e80}', mapping: Disallowed }, ++ Range { from: '\u{e81}', to: '\u{e82}', mapping: Valid }, ++ Range { from: '\u{e83}', to: '\u{e83}', mapping: Disallowed }, ++ Range { from: '\u{e84}', to: '\u{e84}', mapping: Valid }, ++ Range { from: '\u{e85}', to: '\u{e86}', mapping: Disallowed }, ++ Range { from: '\u{e87}', to: '\u{e88}', mapping: Valid }, ++ Range { from: '\u{e89}', to: '\u{e89}', mapping: Disallowed }, ++ Range { from: '\u{e8a}', to: '\u{e8a}', mapping: Valid }, ++ Range { from: '\u{e8b}', to: '\u{e8c}', mapping: Disallowed }, ++ Range { from: '\u{e8d}', to: '\u{e8d}', mapping: Valid }, ++ Range { from: '\u{e8e}', to: '\u{e93}', mapping: Disallowed }, ++ Range { from: '\u{e94}', to: '\u{e97}', mapping: Valid }, ++ Range { from: '\u{e98}', to: '\u{e98}', mapping: Disallowed }, ++ Range { from: '\u{e99}', to: '\u{e9f}', mapping: Valid }, ++ Range { from: '\u{ea0}', to: '\u{ea0}', mapping: Disallowed }, ++ Range { from: '\u{ea1}', to: '\u{ea3}', mapping: Valid }, ++ Range { from: '\u{ea4}', to: '\u{ea4}', mapping: Disallowed }, ++ Range { from: '\u{ea5}', to: '\u{ea5}', mapping: Valid }, ++ Range { from: '\u{ea6}', to: '\u{ea6}', mapping: Disallowed }, ++ Range { from: '\u{ea7}', to: '\u{ea7}', mapping: 
Valid }, ++ Range { from: '\u{ea8}', to: '\u{ea9}', mapping: Disallowed }, ++ Range { from: '\u{eaa}', to: '\u{eab}', mapping: Valid }, ++ Range { from: '\u{eac}', to: '\u{eac}', mapping: Disallowed }, ++ Range { from: '\u{ead}', to: '\u{eb2}', mapping: Valid }, ++ Range { from: '\u{eb3}', to: '\u{eb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{eb4}', to: '\u{eb9}', mapping: Valid }, ++ Range { from: '\u{eba}', to: '\u{eba}', mapping: Disallowed }, ++ Range { from: '\u{ebb}', to: '\u{ebd}', mapping: Valid }, ++ Range { from: '\u{ebe}', to: '\u{ebf}', mapping: Disallowed }, ++ Range { from: '\u{ec0}', to: '\u{ec4}', mapping: Valid }, ++ Range { from: '\u{ec5}', to: '\u{ec5}', mapping: Disallowed }, ++ Range { from: '\u{ec6}', to: '\u{ec6}', mapping: Valid }, ++ Range { from: '\u{ec7}', to: '\u{ec7}', mapping: Disallowed }, ++ Range { from: '\u{ec8}', to: '\u{ecd}', mapping: Valid }, ++ Range { from: '\u{ece}', to: '\u{ecf}', mapping: Disallowed }, ++ Range { from: '\u{ed0}', to: '\u{ed9}', mapping: Valid }, ++ Range { from: '\u{eda}', to: '\u{edb}', mapping: Disallowed }, ++ Range { from: '\u{edc}', to: '\u{edc}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{edd}', to: '\u{edd}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{ede}', to: '\u{edf}', mapping: Valid }, ++ Range { from: '\u{ee0}', to: '\u{eff}', mapping: Disallowed }, ++ Range { from: '\u{f00}', to: '\u{f0b}', mapping: Valid }, ++ Range { from: '\u{f0c}', to: '\u{f0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 4, byte_len: 3 }) }, ++ Range { from: '\u{f0d}', to: '\u{f42}', mapping: Valid }, ++ Range { from: '\u{f43}', to: '\u{f43}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f44}', to: '\u{f47}', mapping: 
Valid }, ++ Range { from: '\u{f48}', to: '\u{f48}', mapping: Disallowed }, ++ Range { from: '\u{f49}', to: '\u{f4c}', mapping: Valid }, ++ Range { from: '\u{f4d}', to: '\u{f4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f4e}', to: '\u{f51}', mapping: Valid }, ++ Range { from: '\u{f52}', to: '\u{f52}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f53}', to: '\u{f56}', mapping: Valid }, ++ Range { from: '\u{f57}', to: '\u{f57}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f58}', to: '\u{f5b}', mapping: Valid }, ++ Range { from: '\u{f5c}', to: '\u{f5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f5d}', to: '\u{f68}', mapping: Valid }, ++ Range { from: '\u{f69}', to: '\u{f69}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f6a}', to: '\u{f6c}', mapping: Valid }, ++ Range { from: '\u{f6d}', to: '\u{f70}', mapping: Disallowed }, ++ Range { from: '\u{f71}', to: '\u{f72}', mapping: Valid }, ++ Range { from: '\u{f73}', to: '\u{f73}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f74}', to: '\u{f74}', mapping: Valid }, ++ Range { from: '\u{f75}', to: '\u{f75}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f76}', to: '\u{f76}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f77}', to: '\u{f77}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 4, byte_len: 9 }) }, ++ Range { from: '\u{f78}', to: '\u{f78}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f79}', 
to: '\u{f79}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 4, byte_len: 9 }) }, ++ Range { from: '\u{f7a}', to: '\u{f80}', mapping: Valid }, ++ Range { from: '\u{f81}', to: '\u{f81}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f82}', to: '\u{f92}', mapping: Valid }, ++ Range { from: '\u{f93}', to: '\u{f93}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f94}', to: '\u{f97}', mapping: Valid }, ++ Range { from: '\u{f98}', to: '\u{f98}', mapping: Disallowed }, ++ Range { from: '\u{f99}', to: '\u{f9c}', mapping: Valid }, ++ Range { from: '\u{f9d}', to: '\u{f9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{f9e}', to: '\u{fa1}', mapping: Valid }, ++ Range { from: '\u{fa2}', to: '\u{fa2}', mapping: Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{fa3}', to: '\u{fa6}', mapping: Valid }, ++ Range { from: '\u{fa7}', to: '\u{fa7}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{fa8}', to: '\u{fab}', mapping: Valid }, ++ Range { from: '\u{fac}', to: '\u{fac}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{fad}', to: '\u{fb8}', mapping: Valid }, ++ Range { from: '\u{fb9}', to: '\u{fb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 4, byte_len: 6 }) }, ++ Range { from: '\u{fba}', to: '\u{fbc}', mapping: Valid }, ++ Range { from: '\u{fbd}', to: '\u{fbd}', mapping: Disallowed }, ++ Range { from: '\u{fbe}', to: '\u{fcc}', mapping: Valid }, ++ Range { from: '\u{fcd}', to: '\u{fcd}', mapping: Disallowed }, ++ Range { from: '\u{fce}', to: '\u{fda}', mapping: Valid }, ++ Range { from: '\u{fdb}', to: '\u{fff}', mapping: Disallowed }, ++ Range { from: 
'\u{1000}', to: '\u{109f}', mapping: Valid }, ++ Range { from: '\u{10a0}', to: '\u{10c6}', mapping: Disallowed }, ++ Range { from: '\u{10c7}', to: '\u{10c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 4, byte_len: 3 }) }, ++ Range { from: '\u{10c8}', to: '\u{10cc}', mapping: Disallowed }, ++ Range { from: '\u{10cd}', to: '\u{10cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 4, byte_len: 3 }) }, ++ Range { from: '\u{10ce}', to: '\u{10cf}', mapping: Disallowed }, ++ Range { from: '\u{10d0}', to: '\u{10fb}', mapping: Valid }, ++ Range { from: '\u{10fc}', to: '\u{10fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 4, byte_len: 3 }) }, ++ Range { from: '\u{10fd}', to: '\u{115e}', mapping: Valid }, ++ Range { from: '\u{115f}', to: '\u{1160}', mapping: Disallowed }, ++ Range { from: '\u{1161}', to: '\u{1248}', mapping: Valid }, ++ Range { from: '\u{1249}', to: '\u{1249}', mapping: Disallowed }, ++ Range { from: '\u{124a}', to: '\u{124d}', mapping: Valid }, ++ Range { from: '\u{124e}', to: '\u{124f}', mapping: Disallowed }, ++ Range { from: '\u{1250}', to: '\u{1256}', mapping: Valid }, ++ Range { from: '\u{1257}', to: '\u{1257}', mapping: Disallowed }, ++ Range { from: '\u{1258}', to: '\u{1258}', mapping: Valid }, ++ Range { from: '\u{1259}', to: '\u{1259}', mapping: Disallowed }, ++ Range { from: '\u{125a}', to: '\u{125d}', mapping: Valid }, ++ Range { from: '\u{125e}', to: '\u{125f}', mapping: Disallowed }, ++ Range { from: '\u{1260}', to: '\u{1288}', mapping: Valid }, ++ Range { from: '\u{1289}', to: '\u{1289}', mapping: Disallowed }, ++ Range { from: '\u{128a}', to: '\u{128d}', mapping: Valid }, ++ Range { from: '\u{128e}', to: '\u{128f}', mapping: Disallowed }, ++ Range { from: '\u{1290}', to: '\u{12b0}', mapping: Valid }, ++ Range { from: '\u{12b1}', to: '\u{12b1}', mapping: Disallowed }, ++ Range { from: '\u{12b2}', to: '\u{12b5}', mapping: Valid }, ++ Range { from: '\u{12b6}', to: 
'\u{12b7}', mapping: Disallowed }, ++ Range { from: '\u{12b8}', to: '\u{12be}', mapping: Valid }, ++ Range { from: '\u{12bf}', to: '\u{12bf}', mapping: Disallowed }, ++ Range { from: '\u{12c0}', to: '\u{12c0}', mapping: Valid }, ++ Range { from: '\u{12c1}', to: '\u{12c1}', mapping: Disallowed }, ++ Range { from: '\u{12c2}', to: '\u{12c5}', mapping: Valid }, ++ Range { from: '\u{12c6}', to: '\u{12c7}', mapping: Disallowed }, ++ Range { from: '\u{12c8}', to: '\u{12d6}', mapping: Valid }, ++ Range { from: '\u{12d7}', to: '\u{12d7}', mapping: Disallowed }, ++ Range { from: '\u{12d8}', to: '\u{1310}', mapping: Valid }, ++ Range { from: '\u{1311}', to: '\u{1311}', mapping: Disallowed }, ++ Range { from: '\u{1312}', to: '\u{1315}', mapping: Valid }, ++ Range { from: '\u{1316}', to: '\u{1317}', mapping: Disallowed }, ++ Range { from: '\u{1318}', to: '\u{135a}', mapping: Valid }, ++ Range { from: '\u{135b}', to: '\u{135c}', mapping: Disallowed }, ++ Range { from: '\u{135d}', to: '\u{137c}', mapping: Valid }, ++ Range { from: '\u{137d}', to: '\u{137f}', mapping: Disallowed }, ++ Range { from: '\u{1380}', to: '\u{1399}', mapping: Valid }, ++ Range { from: '\u{139a}', to: '\u{139f}', mapping: Disallowed }, ++ Range { from: '\u{13a0}', to: '\u{13f5}', mapping: Valid }, ++ Range { from: '\u{13f6}', to: '\u{13f7}', mapping: Disallowed }, ++ Range { from: '\u{13f8}', to: '\u{13f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{13f9}', to: '\u{13f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{13fa}', to: '\u{13fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{13fb}', to: '\u{13fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{13fc}', to: '\u{13fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 5, 
byte_len: 3 }) }, ++ Range { from: '\u{13fd}', to: '\u{13fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{13fe}', to: '\u{13ff}', mapping: Disallowed }, ++ Range { from: '\u{1400}', to: '\u{167f}', mapping: Valid }, ++ Range { from: '\u{1680}', to: '\u{1680}', mapping: Disallowed }, ++ Range { from: '\u{1681}', to: '\u{169c}', mapping: Valid }, ++ Range { from: '\u{169d}', to: '\u{169f}', mapping: Disallowed }, ++ Range { from: '\u{16a0}', to: '\u{16f8}', mapping: Valid }, ++ Range { from: '\u{16f9}', to: '\u{16ff}', mapping: Disallowed }, ++ Range { from: '\u{1700}', to: '\u{170c}', mapping: Valid }, ++ Range { from: '\u{170d}', to: '\u{170d}', mapping: Disallowed }, ++ Range { from: '\u{170e}', to: '\u{1714}', mapping: Valid }, ++ Range { from: '\u{1715}', to: '\u{171f}', mapping: Disallowed }, ++ Range { from: '\u{1720}', to: '\u{1736}', mapping: Valid }, ++ Range { from: '\u{1737}', to: '\u{173f}', mapping: Disallowed }, ++ Range { from: '\u{1740}', to: '\u{1753}', mapping: Valid }, ++ Range { from: '\u{1754}', to: '\u{175f}', mapping: Disallowed }, ++ Range { from: '\u{1760}', to: '\u{176c}', mapping: Valid }, ++ Range { from: '\u{176d}', to: '\u{176d}', mapping: Disallowed }, ++ Range { from: '\u{176e}', to: '\u{1770}', mapping: Valid }, ++ Range { from: '\u{1771}', to: '\u{1771}', mapping: Disallowed }, ++ Range { from: '\u{1772}', to: '\u{1773}', mapping: Valid }, ++ Range { from: '\u{1774}', to: '\u{177f}', mapping: Disallowed }, ++ Range { from: '\u{1780}', to: '\u{17b3}', mapping: Valid }, ++ Range { from: '\u{17b4}', to: '\u{17b5}', mapping: Disallowed }, ++ Range { from: '\u{17b6}', to: '\u{17dd}', mapping: Valid }, ++ Range { from: '\u{17de}', to: '\u{17df}', mapping: Disallowed }, ++ Range { from: '\u{17e0}', to: '\u{17e9}', mapping: Valid }, ++ Range { from: '\u{17ea}', to: '\u{17ef}', mapping: Disallowed }, ++ Range { from: '\u{17f0}', to: '\u{17f9}', mapping: Valid }, ++ Range { 
from: '\u{17fa}', to: '\u{17ff}', mapping: Disallowed }, ++ Range { from: '\u{1800}', to: '\u{1805}', mapping: Valid }, ++ Range { from: '\u{1806}', to: '\u{1806}', mapping: Disallowed }, ++ Range { from: '\u{1807}', to: '\u{180a}', mapping: Valid }, ++ Range { from: '\u{180b}', to: '\u{180d}', mapping: Ignored }, ++ Range { from: '\u{180e}', to: '\u{180f}', mapping: Disallowed }, ++ Range { from: '\u{1810}', to: '\u{1819}', mapping: Valid }, ++ Range { from: '\u{181a}', to: '\u{181f}', mapping: Disallowed }, ++ Range { from: '\u{1820}', to: '\u{1877}', mapping: Valid }, ++ Range { from: '\u{1878}', to: '\u{187f}', mapping: Disallowed }, ++ Range { from: '\u{1880}', to: '\u{18aa}', mapping: Valid }, ++ Range { from: '\u{18ab}', to: '\u{18af}', mapping: Disallowed }, ++ Range { from: '\u{18b0}', to: '\u{18f5}', mapping: Valid }, ++ Range { from: '\u{18f6}', to: '\u{18ff}', mapping: Disallowed }, ++ Range { from: '\u{1900}', to: '\u{191e}', mapping: Valid }, ++ Range { from: '\u{191f}', to: '\u{191f}', mapping: Disallowed }, ++ Range { from: '\u{1920}', to: '\u{192b}', mapping: Valid }, ++ Range { from: '\u{192c}', to: '\u{192f}', mapping: Disallowed }, ++ Range { from: '\u{1930}', to: '\u{193b}', mapping: Valid }, ++ Range { from: '\u{193c}', to: '\u{193f}', mapping: Disallowed }, ++ Range { from: '\u{1940}', to: '\u{1940}', mapping: Valid }, ++ Range { from: '\u{1941}', to: '\u{1943}', mapping: Disallowed }, ++ Range { from: '\u{1944}', to: '\u{196d}', mapping: Valid }, ++ Range { from: '\u{196e}', to: '\u{196f}', mapping: Disallowed }, ++ Range { from: '\u{1970}', to: '\u{1974}', mapping: Valid }, ++ Range { from: '\u{1975}', to: '\u{197f}', mapping: Disallowed }, ++ Range { from: '\u{1980}', to: '\u{19ab}', mapping: Valid }, ++ Range { from: '\u{19ac}', to: '\u{19af}', mapping: Disallowed }, ++ Range { from: '\u{19b0}', to: '\u{19c9}', mapping: Valid }, ++ Range { from: '\u{19ca}', to: '\u{19cf}', mapping: Disallowed }, ++ Range { from: '\u{19d0}', to: 
'\u{19da}', mapping: Valid }, ++ Range { from: '\u{19db}', to: '\u{19dd}', mapping: Disallowed }, ++ Range { from: '\u{19de}', to: '\u{1a1b}', mapping: Valid }, ++ Range { from: '\u{1a1c}', to: '\u{1a1d}', mapping: Disallowed }, ++ Range { from: '\u{1a1e}', to: '\u{1a5e}', mapping: Valid }, ++ Range { from: '\u{1a5f}', to: '\u{1a5f}', mapping: Disallowed }, ++ Range { from: '\u{1a60}', to: '\u{1a7c}', mapping: Valid }, ++ Range { from: '\u{1a7d}', to: '\u{1a7e}', mapping: Disallowed }, ++ Range { from: '\u{1a7f}', to: '\u{1a89}', mapping: Valid }, ++ Range { from: '\u{1a8a}', to: '\u{1a8f}', mapping: Disallowed }, ++ Range { from: '\u{1a90}', to: '\u{1a99}', mapping: Valid }, ++ Range { from: '\u{1a9a}', to: '\u{1a9f}', mapping: Disallowed }, ++ Range { from: '\u{1aa0}', to: '\u{1aad}', mapping: Valid }, ++ Range { from: '\u{1aae}', to: '\u{1aaf}', mapping: Disallowed }, ++ Range { from: '\u{1ab0}', to: '\u{1abe}', mapping: Valid }, ++ Range { from: '\u{1abf}', to: '\u{1aff}', mapping: Disallowed }, ++ Range { from: '\u{1b00}', to: '\u{1b4b}', mapping: Valid }, ++ Range { from: '\u{1b4c}', to: '\u{1b4f}', mapping: Disallowed }, ++ Range { from: '\u{1b50}', to: '\u{1b7c}', mapping: Valid }, ++ Range { from: '\u{1b7d}', to: '\u{1b7f}', mapping: Disallowed }, ++ Range { from: '\u{1b80}', to: '\u{1bf3}', mapping: Valid }, ++ Range { from: '\u{1bf4}', to: '\u{1bfb}', mapping: Disallowed }, ++ Range { from: '\u{1bfc}', to: '\u{1c37}', mapping: Valid }, ++ Range { from: '\u{1c38}', to: '\u{1c3a}', mapping: Disallowed }, ++ Range { from: '\u{1c3b}', to: '\u{1c49}', mapping: Valid }, ++ Range { from: '\u{1c4a}', to: '\u{1c4c}', mapping: Disallowed }, ++ Range { from: '\u{1c4d}', to: '\u{1c7f}', mapping: Valid }, ++ Range { from: '\u{1c80}', to: '\u{1c80}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1c81}', to: '\u{1c81}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 2, byte_len: 2 
}) }, ++ Range { from: '\u{1c82}', to: '\u{1c82}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1c83}', to: '\u{1c83}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1c84}', to: '\u{1c85}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1c86}', to: '\u{1c86}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1c87}', to: '\u{1c87}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1c88}', to: '\u{1c88}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1c89}', to: '\u{1cbf}', mapping: Disallowed }, ++ Range { from: '\u{1cc0}', to: '\u{1cc7}', mapping: Valid }, ++ Range { from: '\u{1cc8}', to: '\u{1ccf}', mapping: Disallowed }, ++ Range { from: '\u{1cd0}', to: '\u{1cf9}', mapping: Valid }, ++ Range { from: '\u{1cfa}', to: '\u{1cff}', mapping: Disallowed }, ++ Range { from: '\u{1d00}', to: '\u{1d2b}', mapping: Valid }, ++ Range { from: '\u{1d2c}', to: '\u{1d2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d2d}', to: '\u{1d2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d2e}', to: '\u{1d2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d2f}', to: '\u{1d2f}', mapping: Valid }, ++ Range { from: '\u{1d30}', to: '\u{1d30}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d31}', to: '\u{1d31}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d32}', to: '\u{1d32}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d33}', to: '\u{1d33}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d34}', to: '\u{1d34}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d35}', to: '\u{1d35}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d36}', to: '\u{1d36}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d37}', to: '\u{1d37}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d38}', to: '\u{1d38}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d39}', to: '\u{1d39}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d3a}', to: '\u{1d3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d3b}', to: '\u{1d3b}', mapping: Valid }, ++ Range { from: '\u{1d3c}', to: '\u{1d3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d3d}', to: '\u{1d3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d3e}', to: '\u{1d3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d3f}', to: '\u{1d3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d40}', to: '\u{1d40}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d41}', to: '\u{1d41}', mapping: Mapped(StringTableSlice { byte_start_lo: 
20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d42}', to: '\u{1d42}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d43}', to: '\u{1d43}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d44}', to: '\u{1d44}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1d45}', to: '\u{1d45}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1d46}', to: '\u{1d46}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1d47}', to: '\u{1d47}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d48}', to: '\u{1d48}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d49}', to: '\u{1d49}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4a}', to: '\u{1d4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d4b}', to: '\u{1d4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d4c}', to: '\u{1d4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1d4d}', to: '\u{1d4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4e}', to: '\u{1d4e}', mapping: Valid }, ++ Range { from: '\u{1d4f}', to: '\u{1d4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d50}', to: '\u{1d50}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { 
from: '\u{1d51}', to: '\u{1d51}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d52}', to: '\u{1d52}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d53}', to: '\u{1d53}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d54}', to: '\u{1d54}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1d55}', to: '\u{1d55}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1d56}', to: '\u{1d56}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d57}', to: '\u{1d57}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d58}', to: '\u{1d58}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d59}', to: '\u{1d59}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1d5a}', to: '\u{1d5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d5b}', to: '\u{1d5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5c}', to: '\u{1d5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1d5d}', to: '\u{1d5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d5e}', to: '\u{1d5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d5f}', to: '\u{1d5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, 
byte_len: 2 }) }, ++ Range { from: '\u{1d60}', to: '\u{1d60}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d61}', to: '\u{1d61}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d62}', to: '\u{1d62}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d63}', to: '\u{1d63}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d64}', to: '\u{1d64}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d65}', to: '\u{1d65}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d66}', to: '\u{1d66}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d67}', to: '\u{1d67}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d68}', to: '\u{1d68}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d69}', to: '\u{1d69}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6a}', to: '\u{1d6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6b}', to: '\u{1d77}', mapping: Valid }, ++ Range { from: '\u{1d78}', to: '\u{1d78}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d79}', to: '\u{1d9a}', mapping: Valid }, ++ Range { from: '\u{1d9b}', to: '\u{1d9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1d9c}', to: '\u{1d9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, 
byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d9d}', to: '\u{1d9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1d9e}', to: '\u{1d9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d9f}', to: '\u{1d9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1da0}', to: '\u{1da0}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1da1}', to: '\u{1da1}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1da2}', to: '\u{1da2}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1da3}', to: '\u{1da3}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1da4}', to: '\u{1da4}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1da5}', to: '\u{1da5}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1da6}', to: '\u{1da6}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1da7}', to: '\u{1da7}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1da8}', to: '\u{1da8}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1da9}', to: '\u{1da9}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1daa}', to: '\u{1daa}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1dab}', to: '\u{1dab}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1dac}', to: '\u{1dac}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1dad}', to: '\u{1dad}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1dae}', to: '\u{1dae}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1daf}', to: '\u{1daf}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1db0}', to: '\u{1db0}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1db1}', to: '\u{1db1}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1db2}', to: '\u{1db2}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1db3}', to: '\u{1db3}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1db4}', to: '\u{1db4}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1db5}', to: '\u{1db5}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1db6}', to: '\u{1db6}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1db7}', to: '\u{1db7}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1db8}', to: '\u{1db8}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1db9}', to: '\u{1db9}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: 
'\u{1dba}', to: '\u{1dba}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1dbb}', to: '\u{1dbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1dbc}', to: '\u{1dbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1dbd}', to: '\u{1dbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{1dbe}', to: '\u{1dbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1dbf}', to: '\u{1dbf}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1dc0}', to: '\u{1df9}', mapping: Valid }, ++ Range { from: '\u{1dfa}', to: '\u{1dfa}', mapping: Disallowed }, ++ Range { from: '\u{1dfb}', to: '\u{1dff}', mapping: Valid }, ++ Range { from: '\u{1e00}', to: '\u{1e00}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e01}', to: '\u{1e01}', mapping: Valid }, ++ Range { from: '\u{1e02}', to: '\u{1e02}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e03}', to: '\u{1e03}', mapping: Valid }, ++ Range { from: '\u{1e04}', to: '\u{1e04}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e05}', to: '\u{1e05}', mapping: Valid }, ++ Range { from: '\u{1e06}', to: '\u{1e06}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e07}', to: '\u{1e07}', mapping: Valid }, ++ Range { from: '\u{1e08}', to: '\u{1e08}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e09}', to: '\u{1e09}', mapping: Valid }, ++ Range { from: 
'\u{1e0a}', to: '\u{1e0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e0b}', to: '\u{1e0b}', mapping: Valid }, ++ Range { from: '\u{1e0c}', to: '\u{1e0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e0d}', to: '\u{1e0d}', mapping: Valid }, ++ Range { from: '\u{1e0e}', to: '\u{1e0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e0f}', to: '\u{1e0f}', mapping: Valid }, ++ Range { from: '\u{1e10}', to: '\u{1e10}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e11}', to: '\u{1e11}', mapping: Valid }, ++ Range { from: '\u{1e12}', to: '\u{1e12}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e13}', to: '\u{1e13}', mapping: Valid }, ++ Range { from: '\u{1e14}', to: '\u{1e14}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e15}', to: '\u{1e15}', mapping: Valid }, ++ Range { from: '\u{1e16}', to: '\u{1e16}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e17}', to: '\u{1e17}', mapping: Valid }, ++ Range { from: '\u{1e18}', to: '\u{1e18}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e19}', to: '\u{1e19}', mapping: Valid }, ++ Range { from: '\u{1e1a}', to: '\u{1e1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e1b}', to: '\u{1e1b}', mapping: Valid }, ++ Range { from: '\u{1e1c}', to: '\u{1e1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e1d}', to: '\u{1e1d}', mapping: Valid }, ++ Range { from: 
'\u{1e1e}', to: '\u{1e1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e1f}', to: '\u{1e1f}', mapping: Valid }, ++ Range { from: '\u{1e20}', to: '\u{1e20}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e21}', to: '\u{1e21}', mapping: Valid }, ++ Range { from: '\u{1e22}', to: '\u{1e22}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e23}', to: '\u{1e23}', mapping: Valid }, ++ Range { from: '\u{1e24}', to: '\u{1e24}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e25}', to: '\u{1e25}', mapping: Valid }, ++ Range { from: '\u{1e26}', to: '\u{1e26}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e27}', to: '\u{1e27}', mapping: Valid }, ++ Range { from: '\u{1e28}', to: '\u{1e28}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e29}', to: '\u{1e29}', mapping: Valid }, ++ Range { from: '\u{1e2a}', to: '\u{1e2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e2b}', to: '\u{1e2b}', mapping: Valid }, ++ Range { from: '\u{1e2c}', to: '\u{1e2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e2d}', to: '\u{1e2d}', mapping: Valid }, ++ Range { from: '\u{1e2e}', to: '\u{1e2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e2f}', to: '\u{1e2f}', mapping: Valid }, ++ Range { from: '\u{1e30}', to: '\u{1e30}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e31}', to: '\u{1e31}', mapping: Valid }, ++ Range { from: 
'\u{1e32}', to: '\u{1e32}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e33}', to: '\u{1e33}', mapping: Valid }, ++ Range { from: '\u{1e34}', to: '\u{1e34}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e35}', to: '\u{1e35}', mapping: Valid }, ++ Range { from: '\u{1e36}', to: '\u{1e36}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e37}', to: '\u{1e37}', mapping: Valid }, ++ Range { from: '\u{1e38}', to: '\u{1e38}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e39}', to: '\u{1e39}', mapping: Valid }, ++ Range { from: '\u{1e3a}', to: '\u{1e3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e3b}', to: '\u{1e3b}', mapping: Valid }, ++ Range { from: '\u{1e3c}', to: '\u{1e3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e3d}', to: '\u{1e3d}', mapping: Valid }, ++ Range { from: '\u{1e3e}', to: '\u{1e3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e3f}', to: '\u{1e3f}', mapping: Valid }, ++ Range { from: '\u{1e40}', to: '\u{1e40}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e41}', to: '\u{1e41}', mapping: Valid }, ++ Range { from: '\u{1e42}', to: '\u{1e42}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e43}', to: '\u{1e43}', mapping: Valid }, ++ Range { from: '\u{1e44}', to: '\u{1e44}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e45}', to: '\u{1e45}', mapping: Valid }, ++ Range { from: 
'\u{1e46}', to: '\u{1e46}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e47}', to: '\u{1e47}', mapping: Valid }, ++ Range { from: '\u{1e48}', to: '\u{1e48}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e49}', to: '\u{1e49}', mapping: Valid }, ++ Range { from: '\u{1e4a}', to: '\u{1e4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e4b}', to: '\u{1e4b}', mapping: Valid }, ++ Range { from: '\u{1e4c}', to: '\u{1e4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e4d}', to: '\u{1e4d}', mapping: Valid }, ++ Range { from: '\u{1e4e}', to: '\u{1e4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e4f}', to: '\u{1e4f}', mapping: Valid }, ++ Range { from: '\u{1e50}', to: '\u{1e50}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e51}', to: '\u{1e51}', mapping: Valid }, ++ Range { from: '\u{1e52}', to: '\u{1e52}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e53}', to: '\u{1e53}', mapping: Valid }, ++ Range { from: '\u{1e54}', to: '\u{1e54}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e55}', to: '\u{1e55}', mapping: Valid }, ++ Range { from: '\u{1e56}', to: '\u{1e56}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e57}', to: '\u{1e57}', mapping: Valid }, ++ Range { from: '\u{1e58}', to: '\u{1e58}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e59}', to: '\u{1e59}', mapping: Valid }, ++ Range { from: 
'\u{1e5a}', to: '\u{1e5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e5b}', to: '\u{1e5b}', mapping: Valid }, ++ Range { from: '\u{1e5c}', to: '\u{1e5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e5d}', to: '\u{1e5d}', mapping: Valid }, ++ Range { from: '\u{1e5e}', to: '\u{1e5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e5f}', to: '\u{1e5f}', mapping: Valid }, ++ Range { from: '\u{1e60}', to: '\u{1e60}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e61}', to: '\u{1e61}', mapping: Valid }, ++ Range { from: '\u{1e62}', to: '\u{1e62}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e63}', to: '\u{1e63}', mapping: Valid }, ++ Range { from: '\u{1e64}', to: '\u{1e64}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e65}', to: '\u{1e65}', mapping: Valid }, ++ Range { from: '\u{1e66}', to: '\u{1e66}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e67}', to: '\u{1e67}', mapping: Valid }, ++ Range { from: '\u{1e68}', to: '\u{1e68}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e69}', to: '\u{1e69}', mapping: Valid }, ++ Range { from: '\u{1e6a}', to: '\u{1e6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e6b}', to: '\u{1e6b}', mapping: Valid }, ++ Range { from: '\u{1e6c}', to: '\u{1e6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e6d}', to: '\u{1e6d}', mapping: Valid }, ++ Range { from: 
'\u{1e6e}', to: '\u{1e6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e6f}', to: '\u{1e6f}', mapping: Valid }, ++ Range { from: '\u{1e70}', to: '\u{1e70}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e71}', to: '\u{1e71}', mapping: Valid }, ++ Range { from: '\u{1e72}', to: '\u{1e72}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e73}', to: '\u{1e73}', mapping: Valid }, ++ Range { from: '\u{1e74}', to: '\u{1e74}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e75}', to: '\u{1e75}', mapping: Valid }, ++ Range { from: '\u{1e76}', to: '\u{1e76}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e77}', to: '\u{1e77}', mapping: Valid }, ++ Range { from: '\u{1e78}', to: '\u{1e78}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e79}', to: '\u{1e79}', mapping: Valid }, ++ Range { from: '\u{1e7a}', to: '\u{1e7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e7b}', to: '\u{1e7b}', mapping: Valid }, ++ Range { from: '\u{1e7c}', to: '\u{1e7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e7d}', to: '\u{1e7d}', mapping: Valid }, ++ Range { from: '\u{1e7e}', to: '\u{1e7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e7f}', to: '\u{1e7f}', mapping: Valid }, ++ Range { from: '\u{1e80}', to: '\u{1e80}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e81}', to: '\u{1e81}', mapping: Valid }, ++ Range { from: '\u{1e82}', to: 
'\u{1e82}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e83}', to: '\u{1e83}', mapping: Valid }, ++ Range { from: '\u{1e84}', to: '\u{1e84}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e85}', to: '\u{1e85}', mapping: Valid }, ++ Range { from: '\u{1e86}', to: '\u{1e86}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e87}', to: '\u{1e87}', mapping: Valid }, ++ Range { from: '\u{1e88}', to: '\u{1e88}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e89}', to: '\u{1e89}', mapping: Valid }, ++ Range { from: '\u{1e8a}', to: '\u{1e8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e8b}', to: '\u{1e8b}', mapping: Valid }, ++ Range { from: '\u{1e8c}', to: '\u{1e8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e8d}', to: '\u{1e8d}', mapping: Valid }, ++ Range { from: '\u{1e8e}', to: '\u{1e8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e8f}', to: '\u{1e8f}', mapping: Valid }, ++ Range { from: '\u{1e90}', to: '\u{1e90}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e91}', to: '\u{1e91}', mapping: Valid }, ++ Range { from: '\u{1e92}', to: '\u{1e92}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e93}', to: '\u{1e93}', mapping: Valid }, ++ Range { from: '\u{1e94}', to: '\u{1e94}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e95}', to: '\u{1e99}', mapping: Valid }, ++ Range { from: '\u{1e9a}', to: '\u{1e9a}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1e9b}', to: '\u{1e9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{1e9c}', to: '\u{1e9d}', mapping: Valid }, ++ Range { from: '\u{1e9e}', to: '\u{1e9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1e9f}', to: '\u{1e9f}', mapping: Valid }, ++ Range { from: '\u{1ea0}', to: '\u{1ea0}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ea1}', to: '\u{1ea1}', mapping: Valid }, ++ Range { from: '\u{1ea2}', to: '\u{1ea2}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ea3}', to: '\u{1ea3}', mapping: Valid }, ++ Range { from: '\u{1ea4}', to: '\u{1ea4}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ea5}', to: '\u{1ea5}', mapping: Valid }, ++ Range { from: '\u{1ea6}', to: '\u{1ea6}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ea7}', to: '\u{1ea7}', mapping: Valid }, ++ Range { from: '\u{1ea8}', to: '\u{1ea8}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ea9}', to: '\u{1ea9}', mapping: Valid }, ++ Range { from: '\u{1eaa}', to: '\u{1eaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1eab}', to: '\u{1eab}', mapping: Valid }, ++ Range { from: '\u{1eac}', to: '\u{1eac}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ead}', to: '\u{1ead}', mapping: Valid }, ++ Range { from: '\u{1eae}', to: '\u{1eae}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 6, 
byte_len: 3 }) }, ++ Range { from: '\u{1eaf}', to: '\u{1eaf}', mapping: Valid }, ++ Range { from: '\u{1eb0}', to: '\u{1eb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1eb1}', to: '\u{1eb1}', mapping: Valid }, ++ Range { from: '\u{1eb2}', to: '\u{1eb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1eb3}', to: '\u{1eb3}', mapping: Valid }, ++ Range { from: '\u{1eb4}', to: '\u{1eb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1eb5}', to: '\u{1eb5}', mapping: Valid }, ++ Range { from: '\u{1eb6}', to: '\u{1eb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1eb7}', to: '\u{1eb7}', mapping: Valid }, ++ Range { from: '\u{1eb8}', to: '\u{1eb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1eb9}', to: '\u{1eb9}', mapping: Valid }, ++ Range { from: '\u{1eba}', to: '\u{1eba}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ebb}', to: '\u{1ebb}', mapping: Valid }, ++ Range { from: '\u{1ebc}', to: '\u{1ebc}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ebd}', to: '\u{1ebd}', mapping: Valid }, ++ Range { from: '\u{1ebe}', to: '\u{1ebe}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ebf}', to: '\u{1ebf}', mapping: Valid }, ++ Range { from: '\u{1ec0}', to: '\u{1ec0}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ec1}', to: '\u{1ec1}', mapping: Valid }, ++ Range { from: '\u{1ec2}', to: '\u{1ec2}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 6, byte_len: 3 }) 
}, ++ Range { from: '\u{1ec3}', to: '\u{1ec3}', mapping: Valid }, ++ Range { from: '\u{1ec4}', to: '\u{1ec4}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ec5}', to: '\u{1ec5}', mapping: Valid }, ++ Range { from: '\u{1ec6}', to: '\u{1ec6}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ec7}', to: '\u{1ec7}', mapping: Valid }, ++ Range { from: '\u{1ec8}', to: '\u{1ec8}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ec9}', to: '\u{1ec9}', mapping: Valid }, ++ Range { from: '\u{1eca}', to: '\u{1eca}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ecb}', to: '\u{1ecb}', mapping: Valid }, ++ Range { from: '\u{1ecc}', to: '\u{1ecc}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ecd}', to: '\u{1ecd}', mapping: Valid }, ++ Range { from: '\u{1ece}', to: '\u{1ece}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ecf}', to: '\u{1ecf}', mapping: Valid }, ++ Range { from: '\u{1ed0}', to: '\u{1ed0}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ed1}', to: '\u{1ed1}', mapping: Valid }, ++ Range { from: '\u{1ed2}', to: '\u{1ed2}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ed3}', to: '\u{1ed3}', mapping: Valid }, ++ Range { from: '\u{1ed4}', to: '\u{1ed4}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ed5}', to: '\u{1ed5}', mapping: Valid }, ++ Range { from: '\u{1ed6}', to: '\u{1ed6}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 6, byte_len: 3 }) }, ++ 
Range { from: '\u{1ed7}', to: '\u{1ed7}', mapping: Valid }, ++ Range { from: '\u{1ed8}', to: '\u{1ed8}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ed9}', to: '\u{1ed9}', mapping: Valid }, ++ Range { from: '\u{1eda}', to: '\u{1eda}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1edb}', to: '\u{1edb}', mapping: Valid }, ++ Range { from: '\u{1edc}', to: '\u{1edc}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1edd}', to: '\u{1edd}', mapping: Valid }, ++ Range { from: '\u{1ede}', to: '\u{1ede}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1edf}', to: '\u{1edf}', mapping: Valid }, ++ Range { from: '\u{1ee0}', to: '\u{1ee0}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ee1}', to: '\u{1ee1}', mapping: Valid }, ++ Range { from: '\u{1ee2}', to: '\u{1ee2}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ee3}', to: '\u{1ee3}', mapping: Valid }, ++ Range { from: '\u{1ee4}', to: '\u{1ee4}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ee5}', to: '\u{1ee5}', mapping: Valid }, ++ Range { from: '\u{1ee6}', to: '\u{1ee6}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ee7}', to: '\u{1ee7}', mapping: Valid }, ++ Range { from: '\u{1ee8}', to: '\u{1ee8}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ee9}', to: '\u{1ee9}', mapping: Valid }, ++ Range { from: '\u{1eea}', to: '\u{1eea}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { 
from: '\u{1eeb}', to: '\u{1eeb}', mapping: Valid }, ++ Range { from: '\u{1eec}', to: '\u{1eec}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1eed}', to: '\u{1eed}', mapping: Valid }, ++ Range { from: '\u{1eee}', to: '\u{1eee}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1eef}', to: '\u{1eef}', mapping: Valid }, ++ Range { from: '\u{1ef0}', to: '\u{1ef0}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ef1}', to: '\u{1ef1}', mapping: Valid }, ++ Range { from: '\u{1ef2}', to: '\u{1ef2}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ef3}', to: '\u{1ef3}', mapping: Valid }, ++ Range { from: '\u{1ef4}', to: '\u{1ef4}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ef5}', to: '\u{1ef5}', mapping: Valid }, ++ Range { from: '\u{1ef6}', to: '\u{1ef6}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ef7}', to: '\u{1ef7}', mapping: Valid }, ++ Range { from: '\u{1ef8}', to: '\u{1ef8}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1ef9}', to: '\u{1ef9}', mapping: Valid }, ++ Range { from: '\u{1efa}', to: '\u{1efa}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1efb}', to: '\u{1efb}', mapping: Valid }, ++ Range { from: '\u{1efc}', to: '\u{1efc}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1efd}', to: '\u{1efd}', mapping: Valid }, ++ Range { from: '\u{1efe}', to: '\u{1efe}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: 
'\u{1eff}', to: '\u{1f07}', mapping: Valid }, ++ Range { from: '\u{1f08}', to: '\u{1f08}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f09}', to: '\u{1f09}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f0a}', to: '\u{1f0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f0b}', to: '\u{1f0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f0c}', to: '\u{1f0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f0d}', to: '\u{1f0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f0e}', to: '\u{1f0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f0f}', to: '\u{1f0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f10}', to: '\u{1f15}', mapping: Valid }, ++ Range { from: '\u{1f16}', to: '\u{1f17}', mapping: Disallowed }, ++ Range { from: '\u{1f18}', to: '\u{1f18}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f19}', to: '\u{1f19}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f1a}', to: '\u{1f1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f1b}', to: '\u{1f1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f1c}', to: '\u{1f1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f1d}', to: '\u{1f1d}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f1e}', to: '\u{1f1f}', mapping: Disallowed }, ++ Range { from: '\u{1f20}', to: '\u{1f27}', mapping: Valid }, ++ Range { from: '\u{1f28}', to: '\u{1f28}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f29}', to: '\u{1f29}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f2a}', to: '\u{1f2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f2b}', to: '\u{1f2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 6, byte_len: 3 }) }, ++ Range { from: '\u{1f2c}', to: '\u{1f2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f2d}', to: '\u{1f2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f2e}', to: '\u{1f2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f2f}', to: '\u{1f2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f30}', to: '\u{1f37}', mapping: Valid }, ++ Range { from: '\u{1f38}', to: '\u{1f38}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f39}', to: '\u{1f39}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f3a}', to: '\u{1f3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f3b}', to: '\u{1f3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f3c}', to: '\u{1f3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, 
byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f3d}', to: '\u{1f3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f3e}', to: '\u{1f3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f3f}', to: '\u{1f3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f40}', to: '\u{1f45}', mapping: Valid }, ++ Range { from: '\u{1f46}', to: '\u{1f47}', mapping: Disallowed }, ++ Range { from: '\u{1f48}', to: '\u{1f48}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f49}', to: '\u{1f49}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f4a}', to: '\u{1f4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f4b}', to: '\u{1f4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f4c}', to: '\u{1f4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f4d}', to: '\u{1f4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f4e}', to: '\u{1f4f}', mapping: Disallowed }, ++ Range { from: '\u{1f50}', to: '\u{1f57}', mapping: Valid }, ++ Range { from: '\u{1f58}', to: '\u{1f58}', mapping: Disallowed }, ++ Range { from: '\u{1f59}', to: '\u{1f59}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f5a}', to: '\u{1f5a}', mapping: Disallowed }, ++ Range { from: '\u{1f5b}', to: '\u{1f5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f5c}', to: '\u{1f5c}', mapping: Disallowed }, ++ Range { 
from: '\u{1f5d}', to: '\u{1f5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f5e}', to: '\u{1f5e}', mapping: Disallowed }, ++ Range { from: '\u{1f5f}', to: '\u{1f5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f60}', to: '\u{1f67}', mapping: Valid }, ++ Range { from: '\u{1f68}', to: '\u{1f68}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f69}', to: '\u{1f69}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f6a}', to: '\u{1f6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f6b}', to: '\u{1f6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f6c}', to: '\u{1f6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f6d}', to: '\u{1f6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f6e}', to: '\u{1f6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f6f}', to: '\u{1f6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1f70}', to: '\u{1f70}', mapping: Valid }, ++ Range { from: '\u{1f71}', to: '\u{1f71}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1f72}', to: '\u{1f72}', mapping: Valid }, ++ Range { from: '\u{1f73}', to: '\u{1f73}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1f74}', to: '\u{1f74}', mapping: Valid }, ++ Range { from: '\u{1f75}', to: '\u{1f75}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1f76}', to: '\u{1f76}', mapping: Valid }, ++ Range { from: '\u{1f77}', to: '\u{1f77}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1f78}', to: '\u{1f78}', mapping: Valid }, ++ Range { from: '\u{1f79}', to: '\u{1f79}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1f7a}', to: '\u{1f7a}', mapping: Valid }, ++ Range { from: '\u{1f7b}', to: '\u{1f7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1f7c}', to: '\u{1f7c}', mapping: Valid }, ++ Range { from: '\u{1f7d}', to: '\u{1f7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1f7e}', to: '\u{1f7f}', mapping: Disallowed }, ++ Range { from: '\u{1f80}', to: '\u{1f80}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f81}', to: '\u{1f81}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f82}', to: '\u{1f82}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f83}', to: '\u{1f83}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f84}', to: '\u{1f84}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f85}', to: '\u{1f85}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f86}', to: '\u{1f86}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f87}', to: '\u{1f87}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, 
byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f88}', to: '\u{1f88}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f89}', to: '\u{1f89}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f8a}', to: '\u{1f8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f8b}', to: '\u{1f8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f8c}', to: '\u{1f8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f8d}', to: '\u{1f8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f8e}', to: '\u{1f8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f8f}', to: '\u{1f8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f90}', to: '\u{1f90}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f91}', to: '\u{1f91}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f92}', to: '\u{1f92}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f93}', to: '\u{1f93}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f94}', to: '\u{1f94}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f95}', to: '\u{1f95}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f96}', to: '\u{1f96}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f97}', to: '\u{1f97}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f98}', to: '\u{1f98}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f99}', to: '\u{1f99}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f9a}', to: '\u{1f9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f9b}', to: '\u{1f9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f9c}', to: '\u{1f9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f9d}', to: '\u{1f9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f9e}', to: '\u{1f9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1f9f}', to: '\u{1f9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fa0}', to: '\u{1fa0}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fa1}', to: '\u{1fa1}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fa2}', to: '\u{1fa2}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fa3}', to: '\u{1fa3}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fa4}', to: '\u{1fa4}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { 
from: '\u{1fa5}', to: '\u{1fa5}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fa6}', to: '\u{1fa6}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fa7}', to: '\u{1fa7}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fa8}', to: '\u{1fa8}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fa9}', to: '\u{1fa9}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1faa}', to: '\u{1faa}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fab}', to: '\u{1fab}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fac}', to: '\u{1fac}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fad}', to: '\u{1fad}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fae}', to: '\u{1fae}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1faf}', to: '\u{1faf}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fb0}', to: '\u{1fb1}', mapping: Valid }, ++ Range { from: '\u{1fb2}', to: '\u{1fb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fb3}', to: '\u{1fb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 7, byte_len: 4 }) }, ++ Range { from: '\u{1fb4}', to: '\u{1fb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 7, byte_len: 4 }) }, ++ Range { from: '\u{1fb5}', to: 
'\u{1fb5}', mapping: Disallowed }, ++ Range { from: '\u{1fb6}', to: '\u{1fb6}', mapping: Valid }, ++ Range { from: '\u{1fb7}', to: '\u{1fb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fb8}', to: '\u{1fb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1fb9}', to: '\u{1fb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1fba}', to: '\u{1fba}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1fbb}', to: '\u{1fbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1fbc}', to: '\u{1fbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 7, byte_len: 4 }) }, ++ Range { from: '\u{1fbd}', to: '\u{1fbd}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1fbe}', to: '\u{1fbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1fbf}', to: '\u{1fbf}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1fc0}', to: '\u{1fc0}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 7, byte_len: 3 }) }, ++ Range { from: '\u{1fc1}', to: '\u{1fc1}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fc2}', to: '\u{1fc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 7, byte_len: 5 }) }, ++ Range { from: '\u{1fc3}', to: '\u{1fc3}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 7, byte_len: 4 }) }, ++ Range { from: '\u{1fc4}', to: '\u{1fc4}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 2, byte_start_hi: 8, byte_len: 4 }) }, ++ Range { from: '\u{1fc5}', to: '\u{1fc5}', mapping: Disallowed }, ++ Range { from: '\u{1fc6}', to: '\u{1fc6}', mapping: Valid }, ++ Range { from: '\u{1fc7}', to: '\u{1fc7}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{1fc8}', to: '\u{1fc8}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1fc9}', to: '\u{1fc9}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1fca}', to: '\u{1fca}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1fcb}', to: '\u{1fcb}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1fcc}', to: '\u{1fcc}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 7, byte_len: 4 }) }, ++ Range { from: '\u{1fcd}', to: '\u{1fcd}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{1fce}', to: '\u{1fce}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{1fcf}', to: '\u{1fcf}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{1fd0}', to: '\u{1fd2}', mapping: Valid }, ++ Range { from: '\u{1fd3}', to: '\u{1fd3}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{1fd4}', to: '\u{1fd5}', mapping: Disallowed }, ++ Range { from: '\u{1fd6}', to: '\u{1fd7}', mapping: Valid }, ++ Range { from: '\u{1fd8}', to: '\u{1fd8}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1fd9}', to: '\u{1fd9}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 37, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1fda}', to: '\u{1fda}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1fdb}', to: '\u{1fdb}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1fdc}', to: '\u{1fdc}', mapping: Disallowed }, ++ Range { from: '\u{1fdd}', to: '\u{1fdd}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{1fde}', to: '\u{1fde}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{1fdf}', to: '\u{1fdf}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{1fe0}', to: '\u{1fe2}', mapping: Valid }, ++ Range { from: '\u{1fe3}', to: '\u{1fe3}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{1fe4}', to: '\u{1fe7}', mapping: Valid }, ++ Range { from: '\u{1fe8}', to: '\u{1fe8}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1fe9}', to: '\u{1fe9}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1fea}', to: '\u{1fea}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1feb}', to: '\u{1feb}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1fec}', to: '\u{1fec}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1fed}', to: '\u{1fed}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{1fee}', to: '\u{1fee}', mapping: 
DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 2, byte_len: 5 }) }, ++ Range { from: '\u{1fef}', to: '\u{1fef}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1ff0}', to: '\u{1ff1}', mapping: Disallowed }, ++ Range { from: '\u{1ff2}', to: '\u{1ff2}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{1ff3}', to: '\u{1ff3}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 8, byte_len: 4 }) }, ++ Range { from: '\u{1ff4}', to: '\u{1ff4}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 8, byte_len: 4 }) }, ++ Range { from: '\u{1ff5}', to: '\u{1ff5}', mapping: Disallowed }, ++ Range { from: '\u{1ff6}', to: '\u{1ff6}', mapping: Valid }, ++ Range { from: '\u{1ff7}', to: '\u{1ff7}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{1ff8}', to: '\u{1ff8}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1ff9}', to: '\u{1ff9}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1ffa}', to: '\u{1ffa}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1ffb}', to: '\u{1ffb}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1ffc}', to: '\u{1ffc}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 8, byte_len: 4 }) }, ++ Range { from: '\u{1ffd}', to: '\u{1ffd}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 0, byte_len: 3 }) }, ++ Range { from: '\u{1ffe}', to: '\u{1ffe}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{1fff}', to: 
'\u{1fff}', mapping: Disallowed }, ++ Range { from: '\u{2000}', to: '\u{200a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{200b}', to: '\u{200b}', mapping: Ignored }, ++ Range { from: '\u{200c}', to: '\u{200d}', mapping: Deviation(StringTableSlice { byte_start_lo: 105, byte_start_hi: 8, byte_len: 0 }) }, ++ Range { from: '\u{200e}', to: '\u{200f}', mapping: Disallowed }, ++ Range { from: '\u{2010}', to: '\u{2010}', mapping: Valid }, ++ Range { from: '\u{2011}', to: '\u{2011}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{2012}', to: '\u{2016}', mapping: Valid }, ++ Range { from: '\u{2017}', to: '\u{2017}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{2018}', to: '\u{2023}', mapping: Valid }, ++ Range { from: '\u{2024}', to: '\u{2026}', mapping: Disallowed }, ++ Range { from: '\u{2027}', to: '\u{2027}', mapping: Valid }, ++ Range { from: '\u{2028}', to: '\u{202e}', mapping: Disallowed }, ++ Range { from: '\u{202f}', to: '\u{202f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2030}', to: '\u{2032}', mapping: Valid }, ++ Range { from: '\u{2033}', to: '\u{2033}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 8, byte_len: 6 }) }, ++ Range { from: '\u{2034}', to: '\u{2034}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 8, byte_len: 9 }) }, ++ Range { from: '\u{2035}', to: '\u{2035}', mapping: Valid }, ++ Range { from: '\u{2036}', to: '\u{2036}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 8, byte_len: 6 }) }, ++ Range { from: '\u{2037}', to: '\u{2037}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 8, byte_len: 9 }) }, ++ Range { from: '\u{2038}', to: '\u{203b}', 
mapping: Valid }, ++ Range { from: '\u{203c}', to: '\u{203c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{203d}', to: '\u{203d}', mapping: Valid }, ++ Range { from: '\u{203e}', to: '\u{203e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{203f}', to: '\u{2046}', mapping: Valid }, ++ Range { from: '\u{2047}', to: '\u{2047}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{2048}', to: '\u{2048}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{2049}', to: '\u{2049}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{204a}', to: '\u{2056}', mapping: Valid }, ++ Range { from: '\u{2057}', to: '\u{2057}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 8, byte_len: 12 }) }, ++ Range { from: '\u{2058}', to: '\u{205e}', mapping: Valid }, ++ Range { from: '\u{205f}', to: '\u{205f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2060}', to: '\u{2060}', mapping: Ignored }, ++ Range { from: '\u{2061}', to: '\u{2063}', mapping: Disallowed }, ++ Range { from: '\u{2064}', to: '\u{2064}', mapping: Ignored }, ++ Range { from: '\u{2065}', to: '\u{206f}', mapping: Disallowed }, ++ Range { from: '\u{2070}', to: '\u{2070}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2071}', to: '\u{2071}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2072}', to: '\u{2073}', mapping: Disallowed }, ++ Range { from: '\u{2074}', to: '\u{2074}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2075}', to: '\u{2075}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2076}', to: '\u{2076}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2077}', to: '\u{2077}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2078}', to: '\u{2078}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2079}', to: '\u{2079}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{207a}', to: '\u{207a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{207b}', to: '\u{207b}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{207c}', to: '\u{207c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{207d}', to: '\u{207d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{207e}', to: '\u{207e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{207f}', to: '\u{207f}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2080}', to: '\u{2080}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2081}', to: '\u{2081}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2082}', to: '\u{2082}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, 
byte_len: 1 }) }, ++ Range { from: '\u{2083}', to: '\u{2083}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2084}', to: '\u{2084}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2085}', to: '\u{2085}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2086}', to: '\u{2086}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2087}', to: '\u{2087}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2088}', to: '\u{2088}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2089}', to: '\u{2089}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{208a}', to: '\u{208a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{208b}', to: '\u{208b}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{208c}', to: '\u{208c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{208d}', to: '\u{208d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{208e}', to: '\u{208e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{208f}', to: '\u{208f}', mapping: Disallowed }, ++ Range { from: '\u{2090}', to: '\u{2090}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2091}', to: '\u{2091}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2092}', to: '\u{2092}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2093}', to: '\u{2093}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2094}', to: '\u{2094}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{2095}', to: '\u{2095}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2096}', to: '\u{2096}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2097}', to: '\u{2097}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2098}', to: '\u{2098}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2099}', to: '\u{2099}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{209a}', to: '\u{209a}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{209b}', to: '\u{209b}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{209c}', to: '\u{209c}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{209d}', to: '\u{209f}', mapping: Disallowed }, ++ Range { from: '\u{20a0}', to: '\u{20a7}', mapping: Valid }, ++ Range { from: '\u{20a8}', to: '\u{20a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{20a9}', to: '\u{20bf}', mapping: Valid }, ++ Range { from: '\u{20c0}', to: '\u{20cf}', mapping: Disallowed }, ++ Range { from: '\u{20d0}', to: '\u{20f0}', mapping: Valid }, 
++ Range { from: '\u{20f1}', to: '\u{20ff}', mapping: Disallowed }, ++ Range { from: '\u{2100}', to: '\u{2100}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{2101}', to: '\u{2101}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{2102}', to: '\u{2102}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2103}', to: '\u{2103}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{2104}', to: '\u{2104}', mapping: Valid }, ++ Range { from: '\u{2105}', to: '\u{2105}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{2106}', to: '\u{2106}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{2107}', to: '\u{2107}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{2108}', to: '\u{2108}', mapping: Valid }, ++ Range { from: '\u{2109}', to: '\u{2109}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{210a}', to: '\u{210a}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{210b}', to: '\u{210e}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{210f}', to: '\u{210f}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{2110}', to: '\u{2111}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2112}', to: '\u{2113}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 
1 }) }, ++ Range { from: '\u{2114}', to: '\u{2114}', mapping: Valid }, ++ Range { from: '\u{2115}', to: '\u{2115}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2116}', to: '\u{2116}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{2117}', to: '\u{2118}', mapping: Valid }, ++ Range { from: '\u{2119}', to: '\u{2119}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{211a}', to: '\u{211a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{211b}', to: '\u{211d}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{211e}', to: '\u{211f}', mapping: Valid }, ++ Range { from: '\u{2120}', to: '\u{2120}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{2121}', to: '\u{2121}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{2122}', to: '\u{2122}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{2123}', to: '\u{2123}', mapping: Valid }, ++ Range { from: '\u{2124}', to: '\u{2124}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2125}', to: '\u{2125}', mapping: Valid }, ++ Range { from: '\u{2126}', to: '\u{2126}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{2127}', to: '\u{2127}', mapping: Valid }, ++ Range { from: '\u{2128}', to: '\u{2128}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2129}', to: '\u{2129}', mapping: Valid }, ++ Range { from: '\u{212a}', to: '\u{212a}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{212b}', to: '\u{212b}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{212c}', to: '\u{212c}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{212d}', to: '\u{212d}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{212e}', to: '\u{212e}', mapping: Valid }, ++ Range { from: '\u{212f}', to: '\u{2130}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2131}', to: '\u{2131}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2132}', to: '\u{2132}', mapping: Disallowed }, ++ Range { from: '\u{2133}', to: '\u{2133}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2134}', to: '\u{2134}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2135}', to: '\u{2135}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{2136}', to: '\u{2136}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{2137}', to: '\u{2137}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{2138}', to: '\u{2138}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{2139}', to: '\u{2139}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{213a}', to: '\u{213a}', mapping: Valid }, ++ Range { from: '\u{213b}', to: '\u{213b}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, 
byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{213c}', to: '\u{213c}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{213d}', to: '\u{213e}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{213f}', to: '\u{213f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{2140}', to: '\u{2140}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 8, byte_len: 3 }) }, ++ Range { from: '\u{2141}', to: '\u{2144}', mapping: Valid }, ++ Range { from: '\u{2145}', to: '\u{2146}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2147}', to: '\u{2147}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2148}', to: '\u{2148}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2149}', to: '\u{2149}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{214a}', to: '\u{214f}', mapping: Valid }, ++ Range { from: '\u{2150}', to: '\u{2150}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{2151}', to: '\u{2151}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{2152}', to: '\u{2152}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 8, byte_len: 6 }) }, ++ Range { from: '\u{2153}', to: '\u{2153}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{2154}', to: '\u{2154}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{2155}', to: '\u{2155}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 247, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{2156}', to: '\u{2156}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 8, byte_len: 5 }) }, ++ Range { from: '\u{2157}', to: '\u{2157}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 9, byte_len: 5 }) }, ++ Range { from: '\u{2158}', to: '\u{2158}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 9, byte_len: 5 }) }, ++ Range { from: '\u{2159}', to: '\u{2159}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 9, byte_len: 5 }) }, ++ Range { from: '\u{215a}', to: '\u{215a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 9, byte_len: 5 }) }, ++ Range { from: '\u{215b}', to: '\u{215b}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 9, byte_len: 5 }) }, ++ Range { from: '\u{215c}', to: '\u{215c}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 9, byte_len: 5 }) }, ++ Range { from: '\u{215d}', to: '\u{215d}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 9, byte_len: 5 }) }, ++ Range { from: '\u{215e}', to: '\u{215e}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 9, byte_len: 5 }) }, ++ Range { from: '\u{215f}', to: '\u{215f}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2160}', to: '\u{2160}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2161}', to: '\u{2161}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2162}', to: '\u{2162}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{2163}', to: '\u{2163}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2164}', to: '\u{2164}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2165}', to: '\u{2165}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2166}', to: '\u{2166}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{2167}', to: '\u{2167}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2168}', to: '\u{2168}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2169}', to: '\u{2169}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{216a}', to: '\u{216a}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{216b}', to: '\u{216b}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{216c}', to: '\u{216c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{216d}', to: '\u{216d}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{216e}', to: '\u{216e}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{216f}', to: '\u{216f}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2170}', to: '\u{2170}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2171}', to: '\u{2171}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2172}', to: '\u{2172}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{2173}', 
to: '\u{2173}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2174}', to: '\u{2174}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2175}', to: '\u{2175}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2176}', to: '\u{2176}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{2177}', to: '\u{2177}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2178}', to: '\u{2178}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2179}', to: '\u{2179}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{217a}', to: '\u{217a}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{217b}', to: '\u{217b}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{217c}', to: '\u{217c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{217d}', to: '\u{217d}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{217e}', to: '\u{217e}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{217f}', to: '\u{217f}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2180}', to: '\u{2182}', mapping: Valid }, ++ Range { from: '\u{2183}', to: '\u{2183}', mapping: Disallowed }, ++ Range { from: '\u{2184}', to: '\u{2188}', mapping: Valid }, ++ Range { from: '\u{2189}', to: '\u{2189}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 68, byte_start_hi: 9, byte_len: 5 }) }, ++ Range { from: '\u{218a}', to: '\u{218b}', mapping: Valid }, ++ Range { from: '\u{218c}', to: '\u{218f}', mapping: Disallowed }, ++ Range { from: '\u{2190}', to: '\u{222b}', mapping: Valid }, ++ Range { from: '\u{222c}', to: '\u{222c}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 9, byte_len: 6 }) }, ++ Range { from: '\u{222d}', to: '\u{222d}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 9, byte_len: 9 }) }, ++ Range { from: '\u{222e}', to: '\u{222e}', mapping: Valid }, ++ Range { from: '\u{222f}', to: '\u{222f}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 9, byte_len: 6 }) }, ++ Range { from: '\u{2230}', to: '\u{2230}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 9, byte_len: 9 }) }, ++ Range { from: '\u{2231}', to: '\u{225f}', mapping: Valid }, ++ Range { from: '\u{2260}', to: '\u{2260}', mapping: DisallowedStd3Valid }, ++ Range { from: '\u{2261}', to: '\u{226d}', mapping: Valid }, ++ Range { from: '\u{226e}', to: '\u{226f}', mapping: DisallowedStd3Valid }, ++ Range { from: '\u{2270}', to: '\u{2328}', mapping: Valid }, ++ Range { from: '\u{2329}', to: '\u{2329}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{232a}', to: '\u{232a}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{232b}', to: '\u{2426}', mapping: Valid }, ++ Range { from: '\u{2427}', to: '\u{243f}', mapping: Disallowed }, ++ Range { from: '\u{2440}', to: '\u{244a}', mapping: Valid }, ++ Range { from: '\u{244b}', to: '\u{245f}', mapping: Disallowed }, ++ Range { from: '\u{2460}', to: '\u{2460}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2461}', to: '\u{2461}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) }, 
++ Range { from: '\u{2462}', to: '\u{2462}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2463}', to: '\u{2463}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2464}', to: '\u{2464}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2465}', to: '\u{2465}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2466}', to: '\u{2466}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2467}', to: '\u{2467}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2468}', to: '\u{2468}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{2469}', to: '\u{2469}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{246a}', to: '\u{246a}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{246b}', to: '\u{246b}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{246c}', to: '\u{246c}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{246d}', to: '\u{246d}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{246e}', to: '\u{246e}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{246f}', to: '\u{246f}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2470}', to: '\u{2470}', mapping: Mapped(StringTableSlice { byte_start_lo: 
123, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2471}', to: '\u{2471}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2472}', to: '\u{2472}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2473}', to: '\u{2473}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 9, byte_len: 2 }) }, ++ Range { from: '\u{2474}', to: '\u{2474}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{2475}', to: '\u{2475}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{2476}', to: '\u{2476}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{2477}', to: '\u{2477}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{2478}', to: '\u{2478}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{2479}', to: '\u{2479}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{247a}', to: '\u{247a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{247b}', to: '\u{247b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{247c}', to: '\u{247c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{247d}', to: '\u{247d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{247e}', to: '\u{247e}', 
mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{247f}', to: '\u{247f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2480}', to: '\u{2480}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2481}', to: '\u{2481}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2482}', to: '\u{2482}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2483}', to: '\u{2483}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2484}', to: '\u{2484}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2485}', to: '\u{2485}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2486}', to: '\u{2486}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2487}', to: '\u{2487}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 9, byte_len: 4 }) }, ++ Range { from: '\u{2488}', to: '\u{249b}', mapping: Disallowed }, ++ Range { from: '\u{249c}', to: '\u{249c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{249d}', to: '\u{249d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{249e}', to: '\u{249e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: 
'\u{249f}', to: '\u{249f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24a0}', to: '\u{24a0}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24a1}', to: '\u{24a1}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24a2}', to: '\u{24a2}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24a3}', to: '\u{24a3}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24a4}', to: '\u{24a4}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24a5}', to: '\u{24a5}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24a6}', to: '\u{24a6}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24a7}', to: '\u{24a7}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24a8}', to: '\u{24a8}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24a9}', to: '\u{24a9}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24aa}', to: '\u{24aa}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24ab}', to: '\u{24ab}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24ac}', to: '\u{24ac}', mapping: 
DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24ad}', to: '\u{24ad}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{24ae}', to: '\u{24ae}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{24af}', to: '\u{24af}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{24b0}', to: '\u{24b0}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{24b1}', to: '\u{24b1}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{24b2}', to: '\u{24b2}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{24b3}', to: '\u{24b3}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{24b4}', to: '\u{24b4}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{24b5}', to: '\u{24b5}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{24b6}', to: '\u{24b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24b7}', to: '\u{24b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24b8}', to: '\u{24b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24b9}', to: '\u{24b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: 
'\u{24ba}', to: '\u{24ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24bb}', to: '\u{24bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24bc}', to: '\u{24bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24bd}', to: '\u{24bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24be}', to: '\u{24be}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24bf}', to: '\u{24bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24c0}', to: '\u{24c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24c1}', to: '\u{24c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24c2}', to: '\u{24c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24c3}', to: '\u{24c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24c4}', to: '\u{24c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24c5}', to: '\u{24c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24c6}', to: '\u{24c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24c7}', to: '\u{24c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24c8}', to: '\u{24c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) 
}, ++ Range { from: '\u{24c9}', to: '\u{24c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24ca}', to: '\u{24ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24cb}', to: '\u{24cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24cc}', to: '\u{24cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24cd}', to: '\u{24cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24ce}', to: '\u{24ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24cf}', to: '\u{24cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24d0}', to: '\u{24d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24d1}', to: '\u{24d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24d2}', to: '\u{24d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24d3}', to: '\u{24d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24d4}', to: '\u{24d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24d5}', to: '\u{24d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24d6}', to: '\u{24d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24d7}', to: '\u{24d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 
0, byte_len: 1 }) }, ++ Range { from: '\u{24d8}', to: '\u{24d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24d9}', to: '\u{24d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24da}', to: '\u{24da}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24db}', to: '\u{24db}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24dc}', to: '\u{24dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24dd}', to: '\u{24dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24de}', to: '\u{24de}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24df}', to: '\u{24df}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24e0}', to: '\u{24e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24e1}', to: '\u{24e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24e2}', to: '\u{24e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24e3}', to: '\u{24e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24e4}', to: '\u{24e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24e5}', to: '\u{24e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24e6}', to: '\u{24e6}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24e7}', to: '\u{24e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24e8}', to: '\u{24e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24e9}', to: '\u{24e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{24ea}', to: '\u{24ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{24eb}', to: '\u{2a0b}', mapping: Valid }, ++ Range { from: '\u{2a0c}', to: '\u{2a0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 10, byte_len: 12 }) }, ++ Range { from: '\u{2a0d}', to: '\u{2a73}', mapping: Valid }, ++ Range { from: '\u{2a74}', to: '\u{2a74}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2a75}', to: '\u{2a75}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 10, byte_len: 2 }) }, ++ Range { from: '\u{2a76}', to: '\u{2a76}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2a77}', to: '\u{2adb}', mapping: Valid }, ++ Range { from: '\u{2adc}', to: '\u{2adc}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 10, byte_len: 5 }) }, ++ Range { from: '\u{2add}', to: '\u{2b73}', mapping: Valid }, ++ Range { from: '\u{2b74}', to: '\u{2b75}', mapping: Disallowed }, ++ Range { from: '\u{2b76}', to: '\u{2b95}', mapping: Valid }, ++ Range { from: '\u{2b96}', to: '\u{2b97}', mapping: Disallowed }, ++ Range { from: '\u{2b98}', to: '\u{2bb9}', mapping: Valid }, ++ Range { from: '\u{2bba}', to: '\u{2bbc}', mapping: Disallowed }, ++ Range { from: '\u{2bbd}', to: '\u{2bc8}', mapping: Valid }, ++ Range { from: '\u{2bc9}', to: 
'\u{2bc9}', mapping: Disallowed }, ++ Range { from: '\u{2bca}', to: '\u{2bd2}', mapping: Valid }, ++ Range { from: '\u{2bd3}', to: '\u{2beb}', mapping: Disallowed }, ++ Range { from: '\u{2bec}', to: '\u{2bef}', mapping: Valid }, ++ Range { from: '\u{2bf0}', to: '\u{2bff}', mapping: Disallowed }, ++ Range { from: '\u{2c00}', to: '\u{2c00}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c01}', to: '\u{2c01}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c02}', to: '\u{2c02}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c03}', to: '\u{2c03}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c04}', to: '\u{2c04}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c05}', to: '\u{2c05}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c06}', to: '\u{2c06}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c07}', to: '\u{2c07}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c08}', to: '\u{2c08}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c09}', to: '\u{2c09}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c0a}', to: '\u{2c0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c0b}', to: '\u{2c0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c0c}', to: '\u{2c0c}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c0d}', to: '\u{2c0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c0e}', to: '\u{2c0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c0f}', to: '\u{2c0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c10}', to: '\u{2c10}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c11}', to: '\u{2c11}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c12}', to: '\u{2c12}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c13}', to: '\u{2c13}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c14}', to: '\u{2c14}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c15}', to: '\u{2c15}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c16}', to: '\u{2c16}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c17}', to: '\u{2c17}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c18}', to: '\u{2c18}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c19}', to: '\u{2c19}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c1a}', to: '\u{2c1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 10, byte_len: 3 }) }, ++ 
Range { from: '\u{2c1b}', to: '\u{2c1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c1c}', to: '\u{2c1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c1d}', to: '\u{2c1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c1e}', to: '\u{2c1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c1f}', to: '\u{2c1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c20}', to: '\u{2c20}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c21}', to: '\u{2c21}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c22}', to: '\u{2c22}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c23}', to: '\u{2c23}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c24}', to: '\u{2c24}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c25}', to: '\u{2c25}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c26}', to: '\u{2c26}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c27}', to: '\u{2c27}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c28}', to: '\u{2c28}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c29}', to: '\u{2c29}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 172, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c2a}', to: '\u{2c2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c2b}', to: '\u{2c2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c2c}', to: '\u{2c2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c2d}', to: '\u{2c2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c2e}', to: '\u{2c2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c2f}', to: '\u{2c2f}', mapping: Disallowed }, ++ Range { from: '\u{2c30}', to: '\u{2c5e}', mapping: Valid }, ++ Range { from: '\u{2c5f}', to: '\u{2c5f}', mapping: Disallowed }, ++ Range { from: '\u{2c60}', to: '\u{2c60}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c61}', to: '\u{2c61}', mapping: Valid }, ++ Range { from: '\u{2c62}', to: '\u{2c62}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 10, byte_len: 2 }) }, ++ Range { from: '\u{2c63}', to: '\u{2c63}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c64}', to: '\u{2c64}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 10, byte_len: 2 }) }, ++ Range { from: '\u{2c65}', to: '\u{2c66}', mapping: Valid }, ++ Range { from: '\u{2c67}', to: '\u{2c67}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c68}', to: '\u{2c68}', mapping: Valid }, ++ Range { from: '\u{2c69}', to: '\u{2c69}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c6a}', to: '\u{2c6a}', 
mapping: Valid }, ++ Range { from: '\u{2c6b}', to: '\u{2c6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c6c}', to: '\u{2c6c}', mapping: Valid }, ++ Range { from: '\u{2c6d}', to: '\u{2c6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{2c6e}', to: '\u{2c6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{2c6f}', to: '\u{2c6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{2c70}', to: '\u{2c70}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{2c71}', to: '\u{2c71}', mapping: Valid }, ++ Range { from: '\u{2c72}', to: '\u{2c72}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c73}', to: '\u{2c74}', mapping: Valid }, ++ Range { from: '\u{2c75}', to: '\u{2c75}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c76}', to: '\u{2c7b}', mapping: Valid }, ++ Range { from: '\u{2c7c}', to: '\u{2c7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2c7d}', to: '\u{2c7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{2c7e}', to: '\u{2c7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 10, byte_len: 2 }) }, ++ Range { from: '\u{2c7f}', to: '\u{2c7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 10, byte_len: 2 }) }, ++ Range { from: '\u{2c80}', to: '\u{2c80}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c81}', to: '\u{2c81}', mapping: Valid }, ++ Range { from: '\u{2c82}', to: 
'\u{2c82}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c83}', to: '\u{2c83}', mapping: Valid }, ++ Range { from: '\u{2c84}', to: '\u{2c84}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c85}', to: '\u{2c85}', mapping: Valid }, ++ Range { from: '\u{2c86}', to: '\u{2c86}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c87}', to: '\u{2c87}', mapping: Valid }, ++ Range { from: '\u{2c88}', to: '\u{2c88}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c89}', to: '\u{2c89}', mapping: Valid }, ++ Range { from: '\u{2c8a}', to: '\u{2c8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c8b}', to: '\u{2c8b}', mapping: Valid }, ++ Range { from: '\u{2c8c}', to: '\u{2c8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c8d}', to: '\u{2c8d}', mapping: Valid }, ++ Range { from: '\u{2c8e}', to: '\u{2c8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c8f}', to: '\u{2c8f}', mapping: Valid }, ++ Range { from: '\u{2c90}', to: '\u{2c90}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c91}', to: '\u{2c91}', mapping: Valid }, ++ Range { from: '\u{2c92}', to: '\u{2c92}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c93}', to: '\u{2c93}', mapping: Valid }, ++ Range { from: '\u{2c94}', to: '\u{2c94}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c95}', to: '\u{2c95}', mapping: Valid }, ++ Range { from: '\u{2c96}', to: 
'\u{2c96}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c97}', to: '\u{2c97}', mapping: Valid }, ++ Range { from: '\u{2c98}', to: '\u{2c98}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{2c99}', to: '\u{2c99}', mapping: Valid }, ++ Range { from: '\u{2c9a}', to: '\u{2c9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2c9b}', to: '\u{2c9b}', mapping: Valid }, ++ Range { from: '\u{2c9c}', to: '\u{2c9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2c9d}', to: '\u{2c9d}', mapping: Valid }, ++ Range { from: '\u{2c9e}', to: '\u{2c9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2c9f}', to: '\u{2c9f}', mapping: Valid }, ++ Range { from: '\u{2ca0}', to: '\u{2ca0}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ca1}', to: '\u{2ca1}', mapping: Valid }, ++ Range { from: '\u{2ca2}', to: '\u{2ca2}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ca3}', to: '\u{2ca3}', mapping: Valid }, ++ Range { from: '\u{2ca4}', to: '\u{2ca4}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ca5}', to: '\u{2ca5}', mapping: Valid }, ++ Range { from: '\u{2ca6}', to: '\u{2ca6}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ca7}', to: '\u{2ca7}', mapping: Valid }, ++ Range { from: '\u{2ca8}', to: '\u{2ca8}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ca9}', to: '\u{2ca9}', mapping: Valid }, ++ Range { from: '\u{2caa}', to: 
'\u{2caa}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cab}', to: '\u{2cab}', mapping: Valid }, ++ Range { from: '\u{2cac}', to: '\u{2cac}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cad}', to: '\u{2cad}', mapping: Valid }, ++ Range { from: '\u{2cae}', to: '\u{2cae}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2caf}', to: '\u{2caf}', mapping: Valid }, ++ Range { from: '\u{2cb0}', to: '\u{2cb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cb1}', to: '\u{2cb1}', mapping: Valid }, ++ Range { from: '\u{2cb2}', to: '\u{2cb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cb3}', to: '\u{2cb3}', mapping: Valid }, ++ Range { from: '\u{2cb4}', to: '\u{2cb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cb5}', to: '\u{2cb5}', mapping: Valid }, ++ Range { from: '\u{2cb6}', to: '\u{2cb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cb7}', to: '\u{2cb7}', mapping: Valid }, ++ Range { from: '\u{2cb8}', to: '\u{2cb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cb9}', to: '\u{2cb9}', mapping: Valid }, ++ Range { from: '\u{2cba}', to: '\u{2cba}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cbb}', to: '\u{2cbb}', mapping: Valid }, ++ Range { from: '\u{2cbc}', to: '\u{2cbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cbd}', to: '\u{2cbd}', mapping: Valid }, ++ Range { from: '\u{2cbe}', to: 
'\u{2cbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cbf}', to: '\u{2cbf}', mapping: Valid }, ++ Range { from: '\u{2cc0}', to: '\u{2cc0}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cc1}', to: '\u{2cc1}', mapping: Valid }, ++ Range { from: '\u{2cc2}', to: '\u{2cc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cc3}', to: '\u{2cc3}', mapping: Valid }, ++ Range { from: '\u{2cc4}', to: '\u{2cc4}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cc5}', to: '\u{2cc5}', mapping: Valid }, ++ Range { from: '\u{2cc6}', to: '\u{2cc6}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cc7}', to: '\u{2cc7}', mapping: Valid }, ++ Range { from: '\u{2cc8}', to: '\u{2cc8}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cc9}', to: '\u{2cc9}', mapping: Valid }, ++ Range { from: '\u{2cca}', to: '\u{2cca}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ccb}', to: '\u{2ccb}', mapping: Valid }, ++ Range { from: '\u{2ccc}', to: '\u{2ccc}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ccd}', to: '\u{2ccd}', mapping: Valid }, ++ Range { from: '\u{2cce}', to: '\u{2cce}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ccf}', to: '\u{2ccf}', mapping: Valid }, ++ Range { from: '\u{2cd0}', to: '\u{2cd0}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cd1}', to: '\u{2cd1}', mapping: Valid }, ++ Range { from: '\u{2cd2}', to: 
'\u{2cd2}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cd3}', to: '\u{2cd3}', mapping: Valid }, ++ Range { from: '\u{2cd4}', to: '\u{2cd4}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cd5}', to: '\u{2cd5}', mapping: Valid }, ++ Range { from: '\u{2cd6}', to: '\u{2cd6}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cd7}', to: '\u{2cd7}', mapping: Valid }, ++ Range { from: '\u{2cd8}', to: '\u{2cd8}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cd9}', to: '\u{2cd9}', mapping: Valid }, ++ Range { from: '\u{2cda}', to: '\u{2cda}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cdb}', to: '\u{2cdb}', mapping: Valid }, ++ Range { from: '\u{2cdc}', to: '\u{2cdc}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cdd}', to: '\u{2cdd}', mapping: Valid }, ++ Range { from: '\u{2cde}', to: '\u{2cde}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cdf}', to: '\u{2cdf}', mapping: Valid }, ++ Range { from: '\u{2ce0}', to: '\u{2ce0}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ce1}', to: '\u{2ce1}', mapping: Valid }, ++ Range { from: '\u{2ce2}', to: '\u{2ce2}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ce3}', to: '\u{2cea}', mapping: Valid }, ++ Range { from: '\u{2ceb}', to: '\u{2ceb}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cec}', to: '\u{2cec}', mapping: Valid }, ++ Range { from: '\u{2ced}', to: 
'\u{2ced}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cee}', to: '\u{2cf1}', mapping: Valid }, ++ Range { from: '\u{2cf2}', to: '\u{2cf2}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2cf3}', to: '\u{2cf3}', mapping: Valid }, ++ Range { from: '\u{2cf4}', to: '\u{2cf8}', mapping: Disallowed }, ++ Range { from: '\u{2cf9}', to: '\u{2d25}', mapping: Valid }, ++ Range { from: '\u{2d26}', to: '\u{2d26}', mapping: Disallowed }, ++ Range { from: '\u{2d27}', to: '\u{2d27}', mapping: Valid }, ++ Range { from: '\u{2d28}', to: '\u{2d2c}', mapping: Disallowed }, ++ Range { from: '\u{2d2d}', to: '\u{2d2d}', mapping: Valid }, ++ Range { from: '\u{2d2e}', to: '\u{2d2f}', mapping: Disallowed }, ++ Range { from: '\u{2d30}', to: '\u{2d67}', mapping: Valid }, ++ Range { from: '\u{2d68}', to: '\u{2d6e}', mapping: Disallowed }, ++ Range { from: '\u{2d6f}', to: '\u{2d6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2d70}', to: '\u{2d70}', mapping: Valid }, ++ Range { from: '\u{2d71}', to: '\u{2d7e}', mapping: Disallowed }, ++ Range { from: '\u{2d7f}', to: '\u{2d96}', mapping: Valid }, ++ Range { from: '\u{2d97}', to: '\u{2d9f}', mapping: Disallowed }, ++ Range { from: '\u{2da0}', to: '\u{2da6}', mapping: Valid }, ++ Range { from: '\u{2da7}', to: '\u{2da7}', mapping: Disallowed }, ++ Range { from: '\u{2da8}', to: '\u{2dae}', mapping: Valid }, ++ Range { from: '\u{2daf}', to: '\u{2daf}', mapping: Disallowed }, ++ Range { from: '\u{2db0}', to: '\u{2db6}', mapping: Valid }, ++ Range { from: '\u{2db7}', to: '\u{2db7}', mapping: Disallowed }, ++ Range { from: '\u{2db8}', to: '\u{2dbe}', mapping: Valid }, ++ Range { from: '\u{2dbf}', to: '\u{2dbf}', mapping: Disallowed }, ++ Range { from: '\u{2dc0}', to: '\u{2dc6}', mapping: Valid }, ++ Range { from: '\u{2dc7}', to: '\u{2dc7}', mapping: 
Disallowed }, ++ Range { from: '\u{2dc8}', to: '\u{2dce}', mapping: Valid }, ++ Range { from: '\u{2dcf}', to: '\u{2dcf}', mapping: Disallowed }, ++ Range { from: '\u{2dd0}', to: '\u{2dd6}', mapping: Valid }, ++ Range { from: '\u{2dd7}', to: '\u{2dd7}', mapping: Disallowed }, ++ Range { from: '\u{2dd8}', to: '\u{2dde}', mapping: Valid }, ++ Range { from: '\u{2ddf}', to: '\u{2ddf}', mapping: Disallowed }, ++ Range { from: '\u{2de0}', to: '\u{2e49}', mapping: Valid }, ++ Range { from: '\u{2e4a}', to: '\u{2e7f}', mapping: Disallowed }, ++ Range { from: '\u{2e80}', to: '\u{2e99}', mapping: Valid }, ++ Range { from: '\u{2e9a}', to: '\u{2e9a}', mapping: Disallowed }, ++ Range { from: '\u{2e9b}', to: '\u{2e9e}', mapping: Valid }, ++ Range { from: '\u{2e9f}', to: '\u{2e9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ea0}', to: '\u{2ef2}', mapping: Valid }, ++ Range { from: '\u{2ef3}', to: '\u{2ef3}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2ef4}', to: '\u{2eff}', mapping: Disallowed }, ++ Range { from: '\u{2f00}', to: '\u{2f00}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f01}', to: '\u{2f01}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f02}', to: '\u{2f02}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f03}', to: '\u{2f03}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f04}', to: '\u{2f04}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f05}', to: '\u{2f05}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f06}', to: 
'\u{2f06}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f07}', to: '\u{2f07}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f08}', to: '\u{2f08}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f09}', to: '\u{2f09}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f0a}', to: '\u{2f0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f0b}', to: '\u{2f0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f0c}', to: '\u{2f0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f0d}', to: '\u{2f0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f0e}', to: '\u{2f0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f0f}', to: '\u{2f0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f10}', to: '\u{2f10}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f11}', to: '\u{2f11}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f12}', to: '\u{2f12}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f13}', to: '\u{2f13}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f14}', to: '\u{2f14}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 
11, byte_len: 3 }) }, ++ Range { from: '\u{2f15}', to: '\u{2f15}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f16}', to: '\u{2f16}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f17}', to: '\u{2f17}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f18}', to: '\u{2f18}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f19}', to: '\u{2f19}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f1a}', to: '\u{2f1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f1b}', to: '\u{2f1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f1c}', to: '\u{2f1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f1d}', to: '\u{2f1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f1e}', to: '\u{2f1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f1f}', to: '\u{2f1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f20}', to: '\u{2f20}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f21}', to: '\u{2f21}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f22}', to: '\u{2f22}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f23}', to: '\u{2f23}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f24}', to: '\u{2f24}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f25}', to: '\u{2f25}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f26}', to: '\u{2f26}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f27}', to: '\u{2f27}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f28}', to: '\u{2f28}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f29}', to: '\u{2f29}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f2a}', to: '\u{2f2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f2b}', to: '\u{2f2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f2c}', to: '\u{2f2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f2d}', to: '\u{2f2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f2e}', to: '\u{2f2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f2f}', to: '\u{2f2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f30}', to: '\u{2f30}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f31}', to: '\u{2f31}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 12, byte_len: 3 }) }, ++ Range 
{ from: '\u{2f32}', to: '\u{2f32}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f33}', to: '\u{2f33}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f34}', to: '\u{2f34}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f35}', to: '\u{2f35}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f36}', to: '\u{2f36}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f37}', to: '\u{2f37}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f38}', to: '\u{2f38}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f39}', to: '\u{2f39}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f3a}', to: '\u{2f3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f3b}', to: '\u{2f3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f3c}', to: '\u{2f3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f3d}', to: '\u{2f3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f3e}', to: '\u{2f3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f3f}', to: '\u{2f3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f40}', to: '\u{2f40}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, 
byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f41}', to: '\u{2f41}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f42}', to: '\u{2f42}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f43}', to: '\u{2f43}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f44}', to: '\u{2f44}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f45}', to: '\u{2f45}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f46}', to: '\u{2f46}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f47}', to: '\u{2f47}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f48}', to: '\u{2f48}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f49}', to: '\u{2f49}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f4a}', to: '\u{2f4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f4b}', to: '\u{2f4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f4c}', to: '\u{2f4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f4d}', to: '\u{2f4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f4e}', to: '\u{2f4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f4f}', to: '\u{2f4f}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f50}', to: '\u{2f50}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f51}', to: '\u{2f51}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f52}', to: '\u{2f52}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f53}', to: '\u{2f53}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f54}', to: '\u{2f54}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f55}', to: '\u{2f55}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f56}', to: '\u{2f56}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f57}', to: '\u{2f57}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f58}', to: '\u{2f58}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f59}', to: '\u{2f59}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f5a}', to: '\u{2f5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f5b}', to: '\u{2f5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f5c}', to: '\u{2f5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f5d}', to: '\u{2f5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 12, byte_len: 3 }) 
}, ++ Range { from: '\u{2f5e}', to: '\u{2f5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f5f}', to: '\u{2f5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f60}', to: '\u{2f60}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f61}', to: '\u{2f61}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f62}', to: '\u{2f62}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f63}', to: '\u{2f63}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f64}', to: '\u{2f64}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f65}', to: '\u{2f65}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f66}', to: '\u{2f66}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f67}', to: '\u{2f67}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f68}', to: '\u{2f68}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f69}', to: '\u{2f69}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f6a}', to: '\u{2f6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f6b}', to: '\u{2f6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f6c}', to: '\u{2f6c}', mapping: Mapped(StringTableSlice 
{ byte_start_lo: 199, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f6d}', to: '\u{2f6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f6e}', to: '\u{2f6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f6f}', to: '\u{2f6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f70}', to: '\u{2f70}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f71}', to: '\u{2f71}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f72}', to: '\u{2f72}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f73}', to: '\u{2f73}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f74}', to: '\u{2f74}', mapping: Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f75}', to: '\u{2f75}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f76}', to: '\u{2f76}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f77}', to: '\u{2f77}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f78}', to: '\u{2f78}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f79}', to: '\u{2f79}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f7a}', to: '\u{2f7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: 
'\u{2f7b}', to: '\u{2f7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f7c}', to: '\u{2f7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f7d}', to: '\u{2f7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f7e}', to: '\u{2f7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f7f}', to: '\u{2f7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f80}', to: '\u{2f80}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f81}', to: '\u{2f81}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f82}', to: '\u{2f82}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f83}', to: '\u{2f83}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f84}', to: '\u{2f84}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f85}', to: '\u{2f85}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f86}', to: '\u{2f86}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f87}', to: '\u{2f87}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f88}', to: '\u{2f88}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f89}', to: '\u{2f89}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, 
byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f8a}', to: '\u{2f8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f8b}', to: '\u{2f8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f8c}', to: '\u{2f8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f8d}', to: '\u{2f8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f8e}', to: '\u{2f8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f8f}', to: '\u{2f8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f90}', to: '\u{2f90}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f91}', to: '\u{2f91}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f92}', to: '\u{2f92}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f93}', to: '\u{2f93}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f94}', to: '\u{2f94}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f95}', to: '\u{2f95}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f96}', to: '\u{2f96}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f97}', to: '\u{2f97}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f98}', to: '\u{2f98}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f99}', to: '\u{2f99}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f9a}', to: '\u{2f9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f9b}', to: '\u{2f9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f9c}', to: '\u{2f9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f9d}', to: '\u{2f9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f9e}', to: '\u{2f9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f9f}', to: '\u{2f9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa0}', to: '\u{2fa0}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa1}', to: '\u{2fa1}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa2}', to: '\u{2fa2}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa3}', to: '\u{2fa3}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa4}', to: '\u{2fa4}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa5}', to: '\u{2fa5}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa6}', to: '\u{2fa6}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 13, byte_len: 3 }) }, ++ 
Range { from: '\u{2fa7}', to: '\u{2fa7}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa8}', to: '\u{2fa8}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa9}', to: '\u{2fa9}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2faa}', to: '\u{2faa}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fab}', to: '\u{2fab}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fac}', to: '\u{2fac}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fad}', to: '\u{2fad}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fae}', to: '\u{2fae}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2faf}', to: '\u{2faf}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fb0}', to: '\u{2fb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fb1}', to: '\u{2fb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fb2}', to: '\u{2fb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fb3}', to: '\u{2fb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fb4}', to: '\u{2fb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fb5}', to: '\u{2fb5}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 162, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fb6}', to: '\u{2fb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fb7}', to: '\u{2fb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fb8}', to: '\u{2fb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fb9}', to: '\u{2fb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fba}', to: '\u{2fba}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fbb}', to: '\u{2fbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fbc}', to: '\u{2fbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fbd}', to: '\u{2fbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fbe}', to: '\u{2fbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fbf}', to: '\u{2fbf}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fc0}', to: '\u{2fc0}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fc1}', to: '\u{2fc1}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fc2}', to: '\u{2fc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fc3}', to: '\u{2fc3}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: 
'\u{2fc4}', to: '\u{2fc4}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fc5}', to: '\u{2fc5}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fc6}', to: '\u{2fc6}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fc7}', to: '\u{2fc7}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fc8}', to: '\u{2fc8}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fc9}', to: '\u{2fc9}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fca}', to: '\u{2fca}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fcb}', to: '\u{2fcb}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fcc}', to: '\u{2fcc}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fcd}', to: '\u{2fcd}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fce}', to: '\u{2fce}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fcf}', to: '\u{2fcf}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fd0}', to: '\u{2fd0}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fd1}', to: '\u{2fd1}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fd2}', to: '\u{2fd2}', mapping: Mapped(StringTableSlice { byte_start_lo: 
249, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fd3}', to: '\u{2fd3}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fd4}', to: '\u{2fd4}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fd5}', to: '\u{2fd5}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{2fd6}', to: '\u{2fff}', mapping: Disallowed }, ++ Range { from: '\u{3000}', to: '\u{3000}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{3001}', to: '\u{3001}', mapping: Valid }, ++ Range { from: '\u{3002}', to: '\u{3002}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 14, byte_len: 1 }) }, ++ Range { from: '\u{3003}', to: '\u{3035}', mapping: Valid }, ++ Range { from: '\u{3036}', to: '\u{3036}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3037}', to: '\u{3037}', mapping: Valid }, ++ Range { from: '\u{3038}', to: '\u{3038}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{3039}', to: '\u{3039}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{303a}', to: '\u{303a}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{303b}', to: '\u{303f}', mapping: Valid }, ++ Range { from: '\u{3040}', to: '\u{3040}', mapping: Disallowed }, ++ Range { from: '\u{3041}', to: '\u{3096}', mapping: Valid }, ++ Range { from: '\u{3097}', to: '\u{3098}', mapping: Disallowed }, ++ Range { from: '\u{3099}', to: '\u{309a}', mapping: Valid }, ++ Range { from: '\u{309b}', to: '\u{309b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 14, 
byte_len: 4 }) }, ++ Range { from: '\u{309c}', to: '\u{309c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 14, byte_len: 4 }) }, ++ Range { from: '\u{309d}', to: '\u{309e}', mapping: Valid }, ++ Range { from: '\u{309f}', to: '\u{309f}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 14, byte_len: 6 }) }, ++ Range { from: '\u{30a0}', to: '\u{30fe}', mapping: Valid }, ++ Range { from: '\u{30ff}', to: '\u{30ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 14, byte_len: 6 }) }, ++ Range { from: '\u{3100}', to: '\u{3104}', mapping: Disallowed }, ++ Range { from: '\u{3105}', to: '\u{312e}', mapping: Valid }, ++ Range { from: '\u{312f}', to: '\u{3130}', mapping: Disallowed }, ++ Range { from: '\u{3131}', to: '\u{3131}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3132}', to: '\u{3132}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3133}', to: '\u{3133}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3134}', to: '\u{3134}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3135}', to: '\u{3135}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3136}', to: '\u{3136}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3137}', to: '\u{3137}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3138}', to: '\u{3138}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3139}', to: '\u{3139}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { 
from: '\u{313a}', to: '\u{313a}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{313b}', to: '\u{313b}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{313c}', to: '\u{313c}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{313d}', to: '\u{313d}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{313e}', to: '\u{313e}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{313f}', to: '\u{313f}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3140}', to: '\u{3140}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3141}', to: '\u{3141}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3142}', to: '\u{3142}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3143}', to: '\u{3143}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3144}', to: '\u{3144}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3145}', to: '\u{3145}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3146}', to: '\u{3146}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3147}', to: '\u{3147}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3148}', to: '\u{3148}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, 
byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3149}', to: '\u{3149}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{314a}', to: '\u{314a}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{314b}', to: '\u{314b}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{314c}', to: '\u{314c}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{314d}', to: '\u{314d}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{314e}', to: '\u{314e}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{314f}', to: '\u{314f}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3150}', to: '\u{3150}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3151}', to: '\u{3151}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3152}', to: '\u{3152}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3153}', to: '\u{3153}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3154}', to: '\u{3154}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3155}', to: '\u{3155}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3156}', to: '\u{3156}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3157}', to: '\u{3157}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3158}', to: '\u{3158}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3159}', to: '\u{3159}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{315a}', to: '\u{315a}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{315b}', to: '\u{315b}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{315c}', to: '\u{315c}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{315d}', to: '\u{315d}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{315e}', to: '\u{315e}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{315f}', to: '\u{315f}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3160}', to: '\u{3160}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3161}', to: '\u{3161}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3162}', to: '\u{3162}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3163}', to: '\u{3163}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3164}', to: '\u{3164}', mapping: Disallowed }, ++ Range { from: '\u{3165}', to: '\u{3165}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3166}', to: '\u{3166}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3167}', to: '\u{3167}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3168}', to: '\u{3168}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3169}', to: '\u{3169}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{316a}', to: '\u{316a}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{316b}', to: '\u{316b}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{316c}', to: '\u{316c}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{316d}', to: '\u{316d}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{316e}', to: '\u{316e}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{316f}', to: '\u{316f}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3170}', to: '\u{3170}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3171}', to: '\u{3171}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3172}', to: '\u{3172}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3173}', to: '\u{3173}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3174}', to: '\u{3174}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 14, byte_len: 3 }) 
}, ++ Range { from: '\u{3175}', to: '\u{3175}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3176}', to: '\u{3176}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3177}', to: '\u{3177}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3178}', to: '\u{3178}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3179}', to: '\u{3179}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{317a}', to: '\u{317a}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{317b}', to: '\u{317b}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{317c}', to: '\u{317c}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{317d}', to: '\u{317d}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{317e}', to: '\u{317e}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{317f}', to: '\u{317f}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3180}', to: '\u{3180}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3181}', to: '\u{3181}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3182}', to: '\u{3182}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3183}', to: '\u{3183}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 22, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3184}', to: '\u{3184}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3185}', to: '\u{3185}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3186}', to: '\u{3186}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3187}', to: '\u{3187}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3188}', to: '\u{3188}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3189}', to: '\u{3189}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{318a}', to: '\u{318a}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{318b}', to: '\u{318b}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{318c}', to: '\u{318c}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{318d}', to: '\u{318d}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{318e}', to: '\u{318e}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{318f}', to: '\u{318f}', mapping: Disallowed }, ++ Range { from: '\u{3190}', to: '\u{3191}', mapping: Valid }, ++ Range { from: '\u{3192}', to: '\u{3192}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{3193}', to: '\u{3193}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{3194}', to: 
'\u{3194}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3195}', to: '\u{3195}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3196}', to: '\u{3196}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3197}', to: '\u{3197}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3198}', to: '\u{3198}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3199}', to: '\u{3199}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{319a}', to: '\u{319a}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{319b}', to: '\u{319b}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{319c}', to: '\u{319c}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{319d}', to: '\u{319d}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{319e}', to: '\u{319e}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{319f}', to: '\u{319f}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{31a0}', to: '\u{31ba}', mapping: Valid }, ++ Range { from: '\u{31bb}', to: '\u{31bf}', mapping: Disallowed }, ++ Range { from: '\u{31c0}', to: '\u{31e3}', mapping: Valid }, ++ Range { from: '\u{31e4}', to: '\u{31ef}', mapping: Disallowed }, ++ Range { from: '\u{31f0}', to: '\u{31ff}', mapping: Valid }, ++ Range { from: '\u{3200}', to: '\u{3200}', mapping: 
DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3201}', to: '\u{3201}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3202}', to: '\u{3202}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3203}', to: '\u{3203}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3204}', to: '\u{3204}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3205}', to: '\u{3205}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3206}', to: '\u{3206}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3207}', to: '\u{3207}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3208}', to: '\u{3208}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3209}', to: '\u{3209}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{320a}', to: '\u{320a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{320b}', to: '\u{320b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{320c}', to: '\u{320c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{320d}', to: '\u{320d}', mapping: DisallowedStd3Mapped(StringTableSlice { 
byte_start_lo: 153, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{320e}', to: '\u{320e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{320f}', to: '\u{320f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3210}', to: '\u{3210}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3211}', to: '\u{3211}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3212}', to: '\u{3212}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3213}', to: '\u{3213}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3214}', to: '\u{3214}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3215}', to: '\u{3215}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3216}', to: '\u{3216}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3217}', to: '\u{3217}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3218}', to: '\u{3218}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3219}', to: '\u{3219}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{321a}', to: '\u{321a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 
15, byte_len: 5 }) }, ++ Range { from: '\u{321b}', to: '\u{321b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{321c}', to: '\u{321c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{321d}', to: '\u{321d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 15, byte_len: 8 }) }, ++ Range { from: '\u{321e}', to: '\u{321e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 15, byte_len: 8 }) }, ++ Range { from: '\u{321f}', to: '\u{321f}', mapping: Disallowed }, ++ Range { from: '\u{3220}', to: '\u{3220}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3221}', to: '\u{3221}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 15, byte_len: 5 }) }, ++ Range { from: '\u{3222}', to: '\u{3222}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3223}', to: '\u{3223}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3224}', to: '\u{3224}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3225}', to: '\u{3225}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3226}', to: '\u{3226}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3227}', to: '\u{3227}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3228}', to: '\u{3228}', mapping: DisallowedStd3Mapped(StringTableSlice { 
byte_start_lo: 33, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3229}', to: '\u{3229}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{322a}', to: '\u{322a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{322b}', to: '\u{322b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{322c}', to: '\u{322c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{322d}', to: '\u{322d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{322e}', to: '\u{322e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{322f}', to: '\u{322f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3230}', to: '\u{3230}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3231}', to: '\u{3231}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3232}', to: '\u{3232}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3233}', to: '\u{3233}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3234}', to: '\u{3234}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3235}', to: '\u{3235}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 16, byte_len: 5 
}) }, ++ Range { from: '\u{3236}', to: '\u{3236}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3237}', to: '\u{3237}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3238}', to: '\u{3238}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3239}', to: '\u{3239}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{323a}', to: '\u{323a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{323b}', to: '\u{323b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{323c}', to: '\u{323c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{323d}', to: '\u{323d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{323e}', to: '\u{323e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{323f}', to: '\u{323f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3240}', to: '\u{3240}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3241}', to: '\u{3241}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3242}', to: '\u{3242}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3243}', 
to: '\u{3243}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 16, byte_len: 5 }) }, ++ Range { from: '\u{3244}', to: '\u{3244}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3245}', to: '\u{3245}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3246}', to: '\u{3246}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{3247}', to: '\u{3247}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3248}', to: '\u{324f}', mapping: Valid }, ++ Range { from: '\u{3250}', to: '\u{3250}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3251}', to: '\u{3251}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{3252}', to: '\u{3252}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{3253}', to: '\u{3253}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{3254}', to: '\u{3254}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{3255}', to: '\u{3255}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{3256}', to: '\u{3256}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{3257}', to: '\u{3257}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{3258}', to: '\u{3258}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{3259}', to: 
'\u{3259}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{325a}', to: '\u{325a}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{325b}', to: '\u{325b}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{325c}', to: '\u{325c}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{325d}', to: '\u{325d}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{325e}', to: '\u{325e}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{325f}', to: '\u{325f}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 16, byte_len: 2 }) }, ++ Range { from: '\u{3260}', to: '\u{3260}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3261}', to: '\u{3261}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3262}', to: '\u{3262}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3263}', to: '\u{3263}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3264}', to: '\u{3264}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3265}', to: '\u{3265}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3266}', to: '\u{3266}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3267}', to: '\u{3267}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 14, 
byte_len: 3 }) }, ++ Range { from: '\u{3268}', to: '\u{3268}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{3269}', to: '\u{3269}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{326a}', to: '\u{326a}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{326b}', to: '\u{326b}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{326c}', to: '\u{326c}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{326d}', to: '\u{326d}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{326e}', to: '\u{326e}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{326f}', to: '\u{326f}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3270}', to: '\u{3270}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3271}', to: '\u{3271}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3272}', to: '\u{3272}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3273}', to: '\u{3273}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3274}', to: '\u{3274}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3275}', to: '\u{3275}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3276}', to: '\u{3276}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3277}', to: '\u{3277}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3278}', to: '\u{3278}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{3279}', to: '\u{3279}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{327a}', to: '\u{327a}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{327b}', to: '\u{327b}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 16, byte_len: 3 }) }, ++ Range { from: '\u{327c}', to: '\u{327c}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 17, byte_len: 6 }) }, ++ Range { from: '\u{327d}', to: '\u{327d}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 17, byte_len: 6 }) }, ++ Range { from: '\u{327e}', to: '\u{327e}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{327f}', to: '\u{327f}', mapping: Valid }, ++ Range { from: '\u{3280}', to: '\u{3280}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{3281}', to: '\u{3281}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{3282}', to: '\u{3282}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3283}', to: '\u{3283}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{3284}', to: '\u{3284}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3285}', to: '\u{3285}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 19, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3286}', to: '\u{3286}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3287}', to: '\u{3287}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{3288}', to: '\u{3288}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3289}', to: '\u{3289}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{328a}', to: '\u{328a}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{328b}', to: '\u{328b}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{328c}', to: '\u{328c}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{328d}', to: '\u{328d}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{328e}', to: '\u{328e}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{328f}', to: '\u{328f}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{3290}', to: '\u{3290}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{3291}', to: '\u{3291}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3292}', to: '\u{3292}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3293}', to: '\u{3293}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3294}', to: 
'\u{3294}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3295}', to: '\u{3295}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3296}', to: '\u{3296}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3297}', to: '\u{3297}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3298}', to: '\u{3298}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{3299}', to: '\u{3299}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{329a}', to: '\u{329a}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{329b}', to: '\u{329b}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{329c}', to: '\u{329c}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{329d}', to: '\u{329d}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{329e}', to: '\u{329e}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{329f}', to: '\u{329f}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32a0}', to: '\u{32a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32a1}', to: '\u{32a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32a2}', to: '\u{32a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 17, byte_len: 
3 }) }, ++ Range { from: '\u{32a3}', to: '\u{32a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32a4}', to: '\u{32a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{32a5}', to: '\u{32a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{32a6}', to: '\u{32a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{32a7}', to: '\u{32a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32a8}', to: '\u{32a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32a9}', to: '\u{32a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32aa}', to: '\u{32aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32ab}', to: '\u{32ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32ac}', to: '\u{32ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32ad}', to: '\u{32ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32ae}', to: '\u{32ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32af}', to: '\u{32af}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32b0}', to: '\u{32b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32b1}', to: '\u{32b1}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 112, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32b2}', to: '\u{32b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32b3}', to: '\u{32b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32b4}', to: '\u{32b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32b5}', to: '\u{32b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32b6}', to: '\u{32b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32b7}', to: '\u{32b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32b8}', to: '\u{32b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32b9}', to: '\u{32b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32ba}', to: '\u{32ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32bb}', to: '\u{32bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32bc}', to: '\u{32bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32bd}', to: '\u{32bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32be}', to: '\u{32be}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32bf}', to: '\u{32bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: 
'\u{32c0}', to: '\u{32c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 17, byte_len: 4 }) }, ++ Range { from: '\u{32c1}', to: '\u{32c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 17, byte_len: 4 }) }, ++ Range { from: '\u{32c2}', to: '\u{32c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 17, byte_len: 4 }) }, ++ Range { from: '\u{32c3}', to: '\u{32c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 17, byte_len: 4 }) }, ++ Range { from: '\u{32c4}', to: '\u{32c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 17, byte_len: 4 }) }, ++ Range { from: '\u{32c5}', to: '\u{32c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 17, byte_len: 4 }) }, ++ Range { from: '\u{32c6}', to: '\u{32c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 17, byte_len: 4 }) }, ++ Range { from: '\u{32c7}', to: '\u{32c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 17, byte_len: 4 }) }, ++ Range { from: '\u{32c8}', to: '\u{32c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 17, byte_len: 4 }) }, ++ Range { from: '\u{32c9}', to: '\u{32c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 17, byte_len: 5 }) }, ++ Range { from: '\u{32ca}', to: '\u{32ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 17, byte_len: 5 }) }, ++ Range { from: '\u{32cb}', to: '\u{32cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 17, byte_len: 5 }) }, ++ Range { from: '\u{32cc}', to: '\u{32cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32cd}', to: '\u{32cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32ce}', to: '\u{32ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 
198, byte_start_hi: 17, byte_len: 2 }) }, ++ Range { from: '\u{32cf}', to: '\u{32cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32d0}', to: '\u{32d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32d1}', to: '\u{32d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32d2}', to: '\u{32d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32d3}', to: '\u{32d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32d4}', to: '\u{32d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32d5}', to: '\u{32d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32d6}', to: '\u{32d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32d7}', to: '\u{32d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32d8}', to: '\u{32d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32d9}', to: '\u{32d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32da}', to: '\u{32da}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32db}', to: '\u{32db}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32dc}', to: '\u{32dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32dd}', to: 
'\u{32dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32de}', to: '\u{32de}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32df}', to: '\u{32df}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32e0}', to: '\u{32e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32e1}', to: '\u{32e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{32e2}', to: '\u{32e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32e3}', to: '\u{32e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32e4}', to: '\u{32e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32e5}', to: '\u{32e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32e6}', to: '\u{32e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32e7}', to: '\u{32e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32e8}', to: '\u{32e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32e9}', to: '\u{32e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32ea}', to: '\u{32ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32eb}', to: '\u{32eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 18, 
byte_len: 3 }) }, ++ Range { from: '\u{32ec}', to: '\u{32ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32ed}', to: '\u{32ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32ee}', to: '\u{32ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32ef}', to: '\u{32ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32f0}', to: '\u{32f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32f1}', to: '\u{32f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32f2}', to: '\u{32f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32f3}', to: '\u{32f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32f4}', to: '\u{32f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32f5}', to: '\u{32f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32f6}', to: '\u{32f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32f7}', to: '\u{32f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32f8}', to: '\u{32f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32f9}', to: '\u{32f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32fa}', to: '\u{32fa}', mapping: Mapped(StringTableSlice 
{ byte_start_lo: 73, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32fb}', to: '\u{32fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32fc}', to: '\u{32fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32fd}', to: '\u{32fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32fe}', to: '\u{32fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{32ff}', to: '\u{32ff}', mapping: Disallowed }, ++ Range { from: '\u{3300}', to: '\u{3300}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 18, byte_len: 12 }) }, ++ Range { from: '\u{3301}', to: '\u{3301}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 18, byte_len: 12 }) }, ++ Range { from: '\u{3302}', to: '\u{3302}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 18, byte_len: 12 }) }, ++ Range { from: '\u{3303}', to: '\u{3303}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 18, byte_len: 9 }) }, ++ Range { from: '\u{3304}', to: '\u{3304}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 18, byte_len: 12 }) }, ++ Range { from: '\u{3305}', to: '\u{3305}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 18, byte_len: 9 }) }, ++ Range { from: '\u{3306}', to: '\u{3306}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 18, byte_len: 9 }) }, ++ Range { from: '\u{3307}', to: '\u{3307}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 18, byte_len: 15 }) }, ++ Range { from: '\u{3308}', to: '\u{3308}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 18, byte_len: 12 }) }, ++ Range { from: '\u{3309}', to: '\u{3309}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 190, byte_start_hi: 18, byte_len: 9 }) }, ++ Range { from: '\u{330a}', to: '\u{330a}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 18, byte_len: 9 }) }, ++ Range { from: '\u{330b}', to: '\u{330b}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 18, byte_len: 9 }) }, ++ Range { from: '\u{330c}', to: '\u{330c}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 18, byte_len: 12 }) }, ++ Range { from: '\u{330d}', to: '\u{330d}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 18, byte_len: 12 }) }, ++ Range { from: '\u{330e}', to: '\u{330e}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 18, byte_len: 9 }) }, ++ Range { from: '\u{330f}', to: '\u{330f}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 18, byte_len: 9 }) }, ++ Range { from: '\u{3310}', to: '\u{3310}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 19, byte_len: 6 }) }, ++ Range { from: '\u{3311}', to: '\u{3311}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 19, byte_len: 9 }) }, ++ Range { from: '\u{3312}', to: '\u{3312}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 19, byte_len: 12 }) }, ++ Range { from: '\u{3313}', to: '\u{3313}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 19, byte_len: 12 }) }, ++ Range { from: '\u{3314}', to: '\u{3314}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 19, byte_len: 6 }) }, ++ Range { from: '\u{3315}', to: '\u{3315}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 19, byte_len: 15 }) }, ++ Range { from: '\u{3316}', to: '\u{3316}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 19, byte_len: 18 }) }, ++ Range { from: '\u{3317}', to: '\u{3317}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 19, byte_len: 15 }) }, ++ Range { from: 
'\u{3318}', to: '\u{3318}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 19, byte_len: 9 }) }, ++ Range { from: '\u{3319}', to: '\u{3319}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 19, byte_len: 15 }) }, ++ Range { from: '\u{331a}', to: '\u{331a}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 19, byte_len: 15 }) }, ++ Range { from: '\u{331b}', to: '\u{331b}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 19, byte_len: 12 }) }, ++ Range { from: '\u{331c}', to: '\u{331c}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 19, byte_len: 9 }) }, ++ Range { from: '\u{331d}', to: '\u{331d}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 19, byte_len: 9 }) }, ++ Range { from: '\u{331e}', to: '\u{331e}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 19, byte_len: 9 }) }, ++ Range { from: '\u{331f}', to: '\u{331f}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 19, byte_len: 12 }) }, ++ Range { from: '\u{3320}', to: '\u{3320}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 19, byte_len: 15 }) }, ++ Range { from: '\u{3321}', to: '\u{3321}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 19, byte_len: 12 }) }, ++ Range { from: '\u{3322}', to: '\u{3322}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 19, byte_len: 9 }) }, ++ Range { from: '\u{3323}', to: '\u{3323}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 19, byte_len: 9 }) }, ++ Range { from: '\u{3324}', to: '\u{3324}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 19, byte_len: 9 }) }, ++ Range { from: '\u{3325}', to: '\u{3325}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 19, byte_len: 6 }) }, ++ Range { from: '\u{3326}', to: '\u{3326}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 246, byte_start_hi: 19, byte_len: 6 }) }, ++ Range { from: '\u{3327}', to: '\u{3327}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 19, byte_len: 6 }) }, ++ Range { from: '\u{3328}', to: '\u{3328}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 20, byte_len: 6 }) }, ++ Range { from: '\u{3329}', to: '\u{3329}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{332a}', to: '\u{332a}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{332b}', to: '\u{332b}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 20, byte_len: 15 }) }, ++ Range { from: '\u{332c}', to: '\u{332c}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{332d}', to: '\u{332d}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 20, byte_len: 12 }) }, ++ Range { from: '\u{332e}', to: '\u{332e}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 20, byte_len: 15 }) }, ++ Range { from: '\u{332f}', to: '\u{332f}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{3330}', to: '\u{3330}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 20, byte_len: 6 }) }, ++ Range { from: '\u{3331}', to: '\u{3331}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 20, byte_len: 6 }) }, ++ Range { from: '\u{3332}', to: '\u{3332}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 20, byte_len: 15 }) }, ++ Range { from: '\u{3333}', to: '\u{3333}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 20, byte_len: 12 }) }, ++ Range { from: '\u{3334}', to: '\u{3334}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 20, byte_len: 15 }) }, ++ Range { from: '\u{3335}', 
to: '\u{3335}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{3336}', to: '\u{3336}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 20, byte_len: 15 }) }, ++ Range { from: '\u{3337}', to: '\u{3337}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 20, byte_len: 6 }) }, ++ Range { from: '\u{3338}', to: '\u{3338}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{3339}', to: '\u{3339}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{333a}', to: '\u{333a}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{333b}', to: '\u{333b}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{333c}', to: '\u{333c}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{333d}', to: '\u{333d}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 20, byte_len: 12 }) }, ++ Range { from: '\u{333e}', to: '\u{333e}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{333f}', to: '\u{333f}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 20, byte_len: 6 }) }, ++ Range { from: '\u{3340}', to: '\u{3340}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{3341}', to: '\u{3341}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 20, byte_len: 9 }) }, ++ Range { from: '\u{3342}', to: '\u{3342}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 21, byte_len: 9 }) }, ++ Range { from: '\u{3343}', to: '\u{3343}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, 
byte_start_hi: 21, byte_len: 12 }) }, ++ Range { from: '\u{3344}', to: '\u{3344}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 21, byte_len: 9 }) }, ++ Range { from: '\u{3345}', to: '\u{3345}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 21, byte_len: 9 }) }, ++ Range { from: '\u{3346}', to: '\u{3346}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 21, byte_len: 9 }) }, ++ Range { from: '\u{3347}', to: '\u{3347}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 21, byte_len: 15 }) }, ++ Range { from: '\u{3348}', to: '\u{3348}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 21, byte_len: 12 }) }, ++ Range { from: '\u{3349}', to: '\u{3349}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 21, byte_len: 6 }) }, ++ Range { from: '\u{334a}', to: '\u{334a}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 21, byte_len: 15 }) }, ++ Range { from: '\u{334b}', to: '\u{334b}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 21, byte_len: 6 }) }, ++ Range { from: '\u{334c}', to: '\u{334c}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 21, byte_len: 12 }) }, ++ Range { from: '\u{334d}', to: '\u{334d}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 21, byte_len: 12 }) }, ++ Range { from: '\u{334e}', to: '\u{334e}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 21, byte_len: 9 }) }, ++ Range { from: '\u{334f}', to: '\u{334f}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 21, byte_len: 9 }) }, ++ Range { from: '\u{3350}', to: '\u{3350}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 21, byte_len: 9 }) }, ++ Range { from: '\u{3351}', to: '\u{3351}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 21, byte_len: 12 }) }, ++ Range { from: '\u{3352}', to: '\u{3352}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 21, byte_len: 6 }) }, ++ Range { from: '\u{3353}', to: '\u{3353}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 21, byte_len: 9 }) }, ++ Range { from: '\u{3354}', to: '\u{3354}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 21, byte_len: 12 }) }, ++ Range { from: '\u{3355}', to: '\u{3355}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 21, byte_len: 6 }) }, ++ Range { from: '\u{3356}', to: '\u{3356}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 21, byte_len: 15 }) }, ++ Range { from: '\u{3357}', to: '\u{3357}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 21, byte_len: 9 }) }, ++ Range { from: '\u{3358}', to: '\u{3358}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 21, byte_len: 4 }) }, ++ Range { from: '\u{3359}', to: '\u{3359}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 21, byte_len: 4 }) }, ++ Range { from: '\u{335a}', to: '\u{335a}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 21, byte_len: 4 }) }, ++ Range { from: '\u{335b}', to: '\u{335b}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 21, byte_len: 4 }) }, ++ Range { from: '\u{335c}', to: '\u{335c}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 21, byte_len: 4 }) }, ++ Range { from: '\u{335d}', to: '\u{335d}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 21, byte_len: 4 }) }, ++ Range { from: '\u{335e}', to: '\u{335e}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 21, byte_len: 4 }) }, ++ Range { from: '\u{335f}', to: '\u{335f}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 21, byte_len: 4 }) }, ++ Range { from: '\u{3360}', to: '\u{3360}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 22, 
byte_len: 4 }) }, ++ Range { from: '\u{3361}', to: '\u{3361}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 22, byte_len: 4 }) }, ++ Range { from: '\u{3362}', to: '\u{3362}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{3363}', to: '\u{3363}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{3364}', to: '\u{3364}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{3365}', to: '\u{3365}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{3366}', to: '\u{3366}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{3367}', to: '\u{3367}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{3368}', to: '\u{3368}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{3369}', to: '\u{3369}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{336a}', to: '\u{336a}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{336b}', to: '\u{336b}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{336c}', to: '\u{336c}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{336d}', to: '\u{336d}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{336e}', to: '\u{336e}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{336f}', to: '\u{336f}', mapping: Mapped(StringTableSlice 
{ byte_start_lo: 75, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{3370}', to: '\u{3370}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{3371}', to: '\u{3371}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{3372}', to: '\u{3372}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3373}', to: '\u{3373}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3374}', to: '\u{3374}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{3375}', to: '\u{3375}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3376}', to: '\u{3376}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3377}', to: '\u{3377}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3378}', to: '\u{3378}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{3379}', to: '\u{3379}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{337a}', to: '\u{337a}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{337b}', to: '\u{337b}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 22, byte_len: 6 }) }, ++ Range { from: '\u{337c}', to: '\u{337c}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 22, byte_len: 6 }) }, ++ Range { from: '\u{337d}', to: '\u{337d}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 22, byte_len: 6 }) }, ++ Range { from: '\u{337e}', 
to: '\u{337e}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 22, byte_len: 6 }) }, ++ Range { from: '\u{337f}', to: '\u{337f}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 22, byte_len: 12 }) }, ++ Range { from: '\u{3380}', to: '\u{3380}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3381}', to: '\u{3381}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3382}', to: '\u{3382}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{3383}', to: '\u{3383}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3384}', to: '\u{3384}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3385}', to: '\u{3385}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3386}', to: '\u{3386}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3387}', to: '\u{3387}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3388}', to: '\u{3388}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{3389}', to: '\u{3389}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 22, byte_len: 4 }) }, ++ Range { from: '\u{338a}', to: '\u{338a}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{338b}', to: '\u{338b}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{338c}', to: '\u{338c}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, 
byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{338d}', to: '\u{338d}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{338e}', to: '\u{338e}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{338f}', to: '\u{338f}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3390}', to: '\u{3390}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3391}', to: '\u{3391}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{3392}', to: '\u{3392}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{3393}', to: '\u{3393}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{3394}', to: '\u{3394}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{3395}', to: '\u{3395}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{3396}', to: '\u{3396}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3397}', to: '\u{3397}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3398}', to: '\u{3398}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{3399}', to: '\u{3399}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{339a}', to: '\u{339a}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{339b}', to: '\u{339b}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{339c}', to: '\u{339c}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{339d}', to: '\u{339d}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{339e}', to: '\u{339e}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{339f}', to: '\u{339f}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{33a0}', to: '\u{33a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{33a1}', to: '\u{33a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{33a2}', to: '\u{33a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{33a3}', to: '\u{33a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{33a4}', to: '\u{33a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{33a5}', to: '\u{33a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{33a6}', to: '\u{33a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{33a7}', to: '\u{33a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 22, byte_len: 5 }) }, ++ Range { from: '\u{33a8}', to: '\u{33a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 22, byte_len: 6 }) }, ++ Range { from: '\u{33a9}', to: '\u{33a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 22, 
byte_len: 2 }) }, ++ Range { from: '\u{33aa}', to: '\u{33aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{33ab}', to: '\u{33ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 22, byte_len: 3 }) }, ++ Range { from: '\u{33ac}', to: '\u{33ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 23, byte_len: 3 }) }, ++ Range { from: '\u{33ad}', to: '\u{33ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 23, byte_len: 3 }) }, ++ Range { from: '\u{33ae}', to: '\u{33ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 23, byte_len: 7 }) }, ++ Range { from: '\u{33af}', to: '\u{33af}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 23, byte_len: 8 }) }, ++ Range { from: '\u{33b0}', to: '\u{33b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33b1}', to: '\u{33b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33b2}', to: '\u{33b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 23, byte_len: 3 }) }, ++ Range { from: '\u{33b3}', to: '\u{33b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33b4}', to: '\u{33b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33b5}', to: '\u{33b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33b6}', to: '\u{33b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 23, byte_len: 3 }) }, ++ Range { from: '\u{33b7}', to: '\u{33b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33b8}', to: '\u{33b8}', mapping: Mapped(StringTableSlice 
{ byte_start_lo: 41, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33b9}', to: '\u{33b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33ba}', to: '\u{33ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33bb}', to: '\u{33bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33bc}', to: '\u{33bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 23, byte_len: 3 }) }, ++ Range { from: '\u{33bd}', to: '\u{33bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33be}', to: '\u{33be}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33bf}', to: '\u{33bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33c0}', to: '\u{33c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 23, byte_len: 3 }) }, ++ Range { from: '\u{33c1}', to: '\u{33c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 23, byte_len: 3 }) }, ++ Range { from: '\u{33c2}', to: '\u{33c2}', mapping: Disallowed }, ++ Range { from: '\u{33c3}', to: '\u{33c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33c4}', to: '\u{33c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33c5}', to: '\u{33c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33c6}', to: '\u{33c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 23, byte_len: 6 }) }, ++ Range { from: '\u{33c7}', to: '\u{33c7}', mapping: Disallowed }, ++ Range { from: '\u{33c8}', to: 
'\u{33c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33c9}', to: '\u{33c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33ca}', to: '\u{33ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33cb}', to: '\u{33cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33cc}', to: '\u{33cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33cd}', to: '\u{33cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33ce}', to: '\u{33ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{33cf}', to: '\u{33cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33d0}', to: '\u{33d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33d1}', to: '\u{33d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33d2}', to: '\u{33d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 23, byte_len: 3 }) }, ++ Range { from: '\u{33d3}', to: '\u{33d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33d4}', to: '\u{33d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 22, byte_len: 2 }) }, ++ Range { from: '\u{33d5}', to: '\u{33d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 23, byte_len: 3 }) }, ++ Range { from: '\u{33d6}', to: '\u{33d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 23, 
byte_len: 3 }) }, ++ Range { from: '\u{33d7}', to: '\u{33d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33d8}', to: '\u{33d8}', mapping: Disallowed }, ++ Range { from: '\u{33d9}', to: '\u{33d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 23, byte_len: 3 }) }, ++ Range { from: '\u{33da}', to: '\u{33da}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33db}', to: '\u{33db}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33dc}', to: '\u{33dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33dd}', to: '\u{33dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{33de}', to: '\u{33de}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33df}', to: '\u{33df}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33e0}', to: '\u{33e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 23, byte_len: 4 }) }, ++ Range { from: '\u{33e1}', to: '\u{33e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 23, byte_len: 4 }) }, ++ Range { from: '\u{33e2}', to: '\u{33e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 23, byte_len: 4 }) }, ++ Range { from: '\u{33e3}', to: '\u{33e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 23, byte_len: 4 }) }, ++ Range { from: '\u{33e4}', to: '\u{33e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 23, byte_len: 4 }) }, ++ Range { from: '\u{33e5}', to: '\u{33e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 23, byte_len: 4 
}) }, ++ Range { from: '\u{33e6}', to: '\u{33e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 23, byte_len: 4 }) }, ++ Range { from: '\u{33e7}', to: '\u{33e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 23, byte_len: 4 }) }, ++ Range { from: '\u{33e8}', to: '\u{33e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 23, byte_len: 4 }) }, ++ Range { from: '\u{33e9}', to: '\u{33e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33ea}', to: '\u{33ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33eb}', to: '\u{33eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33ec}', to: '\u{33ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33ed}', to: '\u{33ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33ee}', to: '\u{33ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33ef}', to: '\u{33ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33f0}', to: '\u{33f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33f1}', to: '\u{33f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33f2}', to: '\u{33f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33f3}', to: '\u{33f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33f4}', to: '\u{33f4}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33f5}', to: '\u{33f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33f6}', to: '\u{33f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33f7}', to: '\u{33f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33f8}', to: '\u{33f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33f9}', to: '\u{33f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33fa}', to: '\u{33fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33fb}', to: '\u{33fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33fc}', to: '\u{33fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 23, byte_len: 5 }) }, ++ Range { from: '\u{33fd}', to: '\u{33fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 24, byte_len: 5 }) }, ++ Range { from: '\u{33fe}', to: '\u{33fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 24, byte_len: 5 }) }, ++ Range { from: '\u{33ff}', to: '\u{33ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{3400}', to: '\u{4db5}', mapping: Valid }, ++ Range { from: '\u{4db6}', to: '\u{4dbf}', mapping: Disallowed }, ++ Range { from: '\u{4dc0}', to: '\u{9fea}', mapping: Valid }, ++ Range { from: '\u{9feb}', to: '\u{9fff}', mapping: Disallowed }, ++ Range { from: '\u{a000}', to: '\u{a48c}', mapping: Valid }, ++ Range { from: '\u{a48d}', to: '\u{a48f}', mapping: Disallowed }, ++ Range { from: 
'\u{a490}', to: '\u{a4c6}', mapping: Valid }, ++ Range { from: '\u{a4c7}', to: '\u{a4cf}', mapping: Disallowed }, ++ Range { from: '\u{a4d0}', to: '\u{a62b}', mapping: Valid }, ++ Range { from: '\u{a62c}', to: '\u{a63f}', mapping: Disallowed }, ++ Range { from: '\u{a640}', to: '\u{a640}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a641}', to: '\u{a641}', mapping: Valid }, ++ Range { from: '\u{a642}', to: '\u{a642}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a643}', to: '\u{a643}', mapping: Valid }, ++ Range { from: '\u{a644}', to: '\u{a644}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a645}', to: '\u{a645}', mapping: Valid }, ++ Range { from: '\u{a646}', to: '\u{a646}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a647}', to: '\u{a647}', mapping: Valid }, ++ Range { from: '\u{a648}', to: '\u{a648}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a649}', to: '\u{a649}', mapping: Valid }, ++ Range { from: '\u{a64a}', to: '\u{a64a}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 5, byte_len: 3 }) }, ++ Range { from: '\u{a64b}', to: '\u{a64b}', mapping: Valid }, ++ Range { from: '\u{a64c}', to: '\u{a64c}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a64d}', to: '\u{a64d}', mapping: Valid }, ++ Range { from: '\u{a64e}', to: '\u{a64e}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a64f}', to: '\u{a64f}', mapping: Valid }, ++ Range { from: '\u{a650}', to: '\u{a650}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: 
'\u{a651}', to: '\u{a651}', mapping: Valid }, ++ Range { from: '\u{a652}', to: '\u{a652}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a653}', to: '\u{a653}', mapping: Valid }, ++ Range { from: '\u{a654}', to: '\u{a654}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a655}', to: '\u{a655}', mapping: Valid }, ++ Range { from: '\u{a656}', to: '\u{a656}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a657}', to: '\u{a657}', mapping: Valid }, ++ Range { from: '\u{a658}', to: '\u{a658}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a659}', to: '\u{a659}', mapping: Valid }, ++ Range { from: '\u{a65a}', to: '\u{a65a}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a65b}', to: '\u{a65b}', mapping: Valid }, ++ Range { from: '\u{a65c}', to: '\u{a65c}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a65d}', to: '\u{a65d}', mapping: Valid }, ++ Range { from: '\u{a65e}', to: '\u{a65e}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a65f}', to: '\u{a65f}', mapping: Valid }, ++ Range { from: '\u{a660}', to: '\u{a660}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a661}', to: '\u{a661}', mapping: Valid }, ++ Range { from: '\u{a662}', to: '\u{a662}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a663}', to: '\u{a663}', mapping: Valid }, ++ Range { from: '\u{a664}', to: '\u{a664}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: 
'\u{a665}', to: '\u{a665}', mapping: Valid }, ++ Range { from: '\u{a666}', to: '\u{a666}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a667}', to: '\u{a667}', mapping: Valid }, ++ Range { from: '\u{a668}', to: '\u{a668}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a669}', to: '\u{a669}', mapping: Valid }, ++ Range { from: '\u{a66a}', to: '\u{a66a}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a66b}', to: '\u{a66b}', mapping: Valid }, ++ Range { from: '\u{a66c}', to: '\u{a66c}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a66d}', to: '\u{a67f}', mapping: Valid }, ++ Range { from: '\u{a680}', to: '\u{a680}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a681}', to: '\u{a681}', mapping: Valid }, ++ Range { from: '\u{a682}', to: '\u{a682}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a683}', to: '\u{a683}', mapping: Valid }, ++ Range { from: '\u{a684}', to: '\u{a684}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a685}', to: '\u{a685}', mapping: Valid }, ++ Range { from: '\u{a686}', to: '\u{a686}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a687}', to: '\u{a687}', mapping: Valid }, ++ Range { from: '\u{a688}', to: '\u{a688}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a689}', to: '\u{a689}', mapping: Valid }, ++ Range { from: '\u{a68a}', to: '\u{a68a}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: 
'\u{a68b}', to: '\u{a68b}', mapping: Valid }, ++ Range { from: '\u{a68c}', to: '\u{a68c}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a68d}', to: '\u{a68d}', mapping: Valid }, ++ Range { from: '\u{a68e}', to: '\u{a68e}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a68f}', to: '\u{a68f}', mapping: Valid }, ++ Range { from: '\u{a690}', to: '\u{a690}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a691}', to: '\u{a691}', mapping: Valid }, ++ Range { from: '\u{a692}', to: '\u{a692}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a693}', to: '\u{a693}', mapping: Valid }, ++ Range { from: '\u{a694}', to: '\u{a694}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a695}', to: '\u{a695}', mapping: Valid }, ++ Range { from: '\u{a696}', to: '\u{a696}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a697}', to: '\u{a697}', mapping: Valid }, ++ Range { from: '\u{a698}', to: '\u{a698}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a699}', to: '\u{a699}', mapping: Valid }, ++ Range { from: '\u{a69a}', to: '\u{a69a}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a69b}', to: '\u{a69b}', mapping: Valid }, ++ Range { from: '\u{a69c}', to: '\u{a69c}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{a69d}', to: '\u{a69d}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{a69e}', to: '\u{a6f7}', mapping: Valid }, ++ Range { from: 
'\u{a6f8}', to: '\u{a6ff}', mapping: Disallowed }, ++ Range { from: '\u{a700}', to: '\u{a721}', mapping: Valid }, ++ Range { from: '\u{a722}', to: '\u{a722}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a723}', to: '\u{a723}', mapping: Valid }, ++ Range { from: '\u{a724}', to: '\u{a724}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a725}', to: '\u{a725}', mapping: Valid }, ++ Range { from: '\u{a726}', to: '\u{a726}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a727}', to: '\u{a727}', mapping: Valid }, ++ Range { from: '\u{a728}', to: '\u{a728}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a729}', to: '\u{a729}', mapping: Valid }, ++ Range { from: '\u{a72a}', to: '\u{a72a}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a72b}', to: '\u{a72b}', mapping: Valid }, ++ Range { from: '\u{a72c}', to: '\u{a72c}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a72d}', to: '\u{a72d}', mapping: Valid }, ++ Range { from: '\u{a72e}', to: '\u{a72e}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a72f}', to: '\u{a731}', mapping: Valid }, ++ Range { from: '\u{a732}', to: '\u{a732}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a733}', to: '\u{a733}', mapping: Valid }, ++ Range { from: '\u{a734}', to: '\u{a734}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a735}', to: '\u{a735}', mapping: Valid }, ++ Range { from: '\u{a736}', to: '\u{a736}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 152, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a737}', to: '\u{a737}', mapping: Valid }, ++ Range { from: '\u{a738}', to: '\u{a738}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a739}', to: '\u{a739}', mapping: Valid }, ++ Range { from: '\u{a73a}', to: '\u{a73a}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a73b}', to: '\u{a73b}', mapping: Valid }, ++ Range { from: '\u{a73c}', to: '\u{a73c}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a73d}', to: '\u{a73d}', mapping: Valid }, ++ Range { from: '\u{a73e}', to: '\u{a73e}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a73f}', to: '\u{a73f}', mapping: Valid }, ++ Range { from: '\u{a740}', to: '\u{a740}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a741}', to: '\u{a741}', mapping: Valid }, ++ Range { from: '\u{a742}', to: '\u{a742}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a743}', to: '\u{a743}', mapping: Valid }, ++ Range { from: '\u{a744}', to: '\u{a744}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a745}', to: '\u{a745}', mapping: Valid }, ++ Range { from: '\u{a746}', to: '\u{a746}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a747}', to: '\u{a747}', mapping: Valid }, ++ Range { from: '\u{a748}', to: '\u{a748}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a749}', to: '\u{a749}', mapping: Valid }, ++ Range { from: '\u{a74a}', to: '\u{a74a}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 182, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a74b}', to: '\u{a74b}', mapping: Valid }, ++ Range { from: '\u{a74c}', to: '\u{a74c}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a74d}', to: '\u{a74d}', mapping: Valid }, ++ Range { from: '\u{a74e}', to: '\u{a74e}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a74f}', to: '\u{a74f}', mapping: Valid }, ++ Range { from: '\u{a750}', to: '\u{a750}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a751}', to: '\u{a751}', mapping: Valid }, ++ Range { from: '\u{a752}', to: '\u{a752}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a753}', to: '\u{a753}', mapping: Valid }, ++ Range { from: '\u{a754}', to: '\u{a754}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a755}', to: '\u{a755}', mapping: Valid }, ++ Range { from: '\u{a756}', to: '\u{a756}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a757}', to: '\u{a757}', mapping: Valid }, ++ Range { from: '\u{a758}', to: '\u{a758}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a759}', to: '\u{a759}', mapping: Valid }, ++ Range { from: '\u{a75a}', to: '\u{a75a}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a75b}', to: '\u{a75b}', mapping: Valid }, ++ Range { from: '\u{a75c}', to: '\u{a75c}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a75d}', to: '\u{a75d}', mapping: Valid }, ++ Range { from: '\u{a75e}', to: '\u{a75e}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 212, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a75f}', to: '\u{a75f}', mapping: Valid }, ++ Range { from: '\u{a760}', to: '\u{a760}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a761}', to: '\u{a761}', mapping: Valid }, ++ Range { from: '\u{a762}', to: '\u{a762}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a763}', to: '\u{a763}', mapping: Valid }, ++ Range { from: '\u{a764}', to: '\u{a764}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a765}', to: '\u{a765}', mapping: Valid }, ++ Range { from: '\u{a766}', to: '\u{a766}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a767}', to: '\u{a767}', mapping: Valid }, ++ Range { from: '\u{a768}', to: '\u{a768}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a769}', to: '\u{a769}', mapping: Valid }, ++ Range { from: '\u{a76a}', to: '\u{a76a}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a76b}', to: '\u{a76b}', mapping: Valid }, ++ Range { from: '\u{a76c}', to: '\u{a76c}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a76d}', to: '\u{a76d}', mapping: Valid }, ++ Range { from: '\u{a76e}', to: '\u{a76e}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a76f}', to: '\u{a76f}', mapping: Valid }, ++ Range { from: '\u{a770}', to: '\u{a770}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a771}', to: '\u{a778}', mapping: Valid }, ++ Range { from: '\u{a779}', to: '\u{a779}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 239, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a77a}', to: '\u{a77a}', mapping: Valid }, ++ Range { from: '\u{a77b}', to: '\u{a77b}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a77c}', to: '\u{a77c}', mapping: Valid }, ++ Range { from: '\u{a77d}', to: '\u{a77d}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a77e}', to: '\u{a77e}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a77f}', to: '\u{a77f}', mapping: Valid }, ++ Range { from: '\u{a780}', to: '\u{a780}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a781}', to: '\u{a781}', mapping: Valid }, ++ Range { from: '\u{a782}', to: '\u{a782}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{a783}', to: '\u{a783}', mapping: Valid }, ++ Range { from: '\u{a784}', to: '\u{a784}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a785}', to: '\u{a785}', mapping: Valid }, ++ Range { from: '\u{a786}', to: '\u{a786}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a787}', to: '\u{a78a}', mapping: Valid }, ++ Range { from: '\u{a78b}', to: '\u{a78b}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a78c}', to: '\u{a78c}', mapping: Valid }, ++ Range { from: '\u{a78d}', to: '\u{a78d}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{a78e}', to: '\u{a78f}', mapping: Valid }, ++ Range { from: '\u{a790}', to: '\u{a790}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { 
from: '\u{a791}', to: '\u{a791}', mapping: Valid }, ++ Range { from: '\u{a792}', to: '\u{a792}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a793}', to: '\u{a795}', mapping: Valid }, ++ Range { from: '\u{a796}', to: '\u{a796}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a797}', to: '\u{a797}', mapping: Valid }, ++ Range { from: '\u{a798}', to: '\u{a798}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a799}', to: '\u{a799}', mapping: Valid }, ++ Range { from: '\u{a79a}', to: '\u{a79a}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a79b}', to: '\u{a79b}', mapping: Valid }, ++ Range { from: '\u{a79c}', to: '\u{a79c}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a79d}', to: '\u{a79d}', mapping: Valid }, ++ Range { from: '\u{a79e}', to: '\u{a79e}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a79f}', to: '\u{a79f}', mapping: Valid }, ++ Range { from: '\u{a7a0}', to: '\u{a7a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a7a1}', to: '\u{a7a1}', mapping: Valid }, ++ Range { from: '\u{a7a2}', to: '\u{a7a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a7a3}', to: '\u{a7a3}', mapping: Valid }, ++ Range { from: '\u{a7a4}', to: '\u{a7a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a7a5}', to: '\u{a7a5}', mapping: Valid }, ++ Range { from: '\u{a7a6}', to: '\u{a7a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: 
'\u{a7a7}', to: '\u{a7a7}', mapping: Valid }, ++ Range { from: '\u{a7a8}', to: '\u{a7a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a7a9}', to: '\u{a7a9}', mapping: Valid }, ++ Range { from: '\u{a7aa}', to: '\u{a7aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{a7ab}', to: '\u{a7ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{a7ac}', to: '\u{a7ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{a7ad}', to: '\u{a7ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 25, byte_len: 2 }) }, ++ Range { from: '\u{a7ae}', to: '\u{a7ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{a7af}', to: '\u{a7af}', mapping: Disallowed }, ++ Range { from: '\u{a7b0}', to: '\u{a7b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 25, byte_len: 2 }) }, ++ Range { from: '\u{a7b1}', to: '\u{a7b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 25, byte_len: 2 }) }, ++ Range { from: '\u{a7b2}', to: '\u{a7b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 5, byte_len: 2 }) }, ++ Range { from: '\u{a7b3}', to: '\u{a7b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a7b4}', to: '\u{a7b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a7b5}', to: '\u{a7b5}', mapping: Valid }, ++ Range { from: '\u{a7b6}', to: '\u{a7b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{a7b7}', to: '\u{a7b7}', mapping: Valid }, ++ Range { from: '\u{a7b8}', to: '\u{a7f6}', mapping: Disallowed }, 
++ Range { from: '\u{a7f7}', to: '\u{a7f7}', mapping: Valid }, ++ Range { from: '\u{a7f8}', to: '\u{a7f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{a7f9}', to: '\u{a7f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{a7fa}', to: '\u{a82b}', mapping: Valid }, ++ Range { from: '\u{a82c}', to: '\u{a82f}', mapping: Disallowed }, ++ Range { from: '\u{a830}', to: '\u{a839}', mapping: Valid }, ++ Range { from: '\u{a83a}', to: '\u{a83f}', mapping: Disallowed }, ++ Range { from: '\u{a840}', to: '\u{a877}', mapping: Valid }, ++ Range { from: '\u{a878}', to: '\u{a87f}', mapping: Disallowed }, ++ Range { from: '\u{a880}', to: '\u{a8c5}', mapping: Valid }, ++ Range { from: '\u{a8c6}', to: '\u{a8cd}', mapping: Disallowed }, ++ Range { from: '\u{a8ce}', to: '\u{a8d9}', mapping: Valid }, ++ Range { from: '\u{a8da}', to: '\u{a8df}', mapping: Disallowed }, ++ Range { from: '\u{a8e0}', to: '\u{a8fd}', mapping: Valid }, ++ Range { from: '\u{a8fe}', to: '\u{a8ff}', mapping: Disallowed }, ++ Range { from: '\u{a900}', to: '\u{a953}', mapping: Valid }, ++ Range { from: '\u{a954}', to: '\u{a95e}', mapping: Disallowed }, ++ Range { from: '\u{a95f}', to: '\u{a97c}', mapping: Valid }, ++ Range { from: '\u{a97d}', to: '\u{a97f}', mapping: Disallowed }, ++ Range { from: '\u{a980}', to: '\u{a9cd}', mapping: Valid }, ++ Range { from: '\u{a9ce}', to: '\u{a9ce}', mapping: Disallowed }, ++ Range { from: '\u{a9cf}', to: '\u{a9d9}', mapping: Valid }, ++ Range { from: '\u{a9da}', to: '\u{a9dd}', mapping: Disallowed }, ++ Range { from: '\u{a9de}', to: '\u{a9fe}', mapping: Valid }, ++ Range { from: '\u{a9ff}', to: '\u{a9ff}', mapping: Disallowed }, ++ Range { from: '\u{aa00}', to: '\u{aa36}', mapping: Valid }, ++ Range { from: '\u{aa37}', to: '\u{aa3f}', mapping: Disallowed }, ++ Range { from: '\u{aa40}', to: '\u{aa4d}', mapping: Valid }, ++ Range { from: '\u{aa4e}', 
to: '\u{aa4f}', mapping: Disallowed }, ++ Range { from: '\u{aa50}', to: '\u{aa59}', mapping: Valid }, ++ Range { from: '\u{aa5a}', to: '\u{aa5b}', mapping: Disallowed }, ++ Range { from: '\u{aa5c}', to: '\u{aac2}', mapping: Valid }, ++ Range { from: '\u{aac3}', to: '\u{aada}', mapping: Disallowed }, ++ Range { from: '\u{aadb}', to: '\u{aaf6}', mapping: Valid }, ++ Range { from: '\u{aaf7}', to: '\u{ab00}', mapping: Disallowed }, ++ Range { from: '\u{ab01}', to: '\u{ab06}', mapping: Valid }, ++ Range { from: '\u{ab07}', to: '\u{ab08}', mapping: Disallowed }, ++ Range { from: '\u{ab09}', to: '\u{ab0e}', mapping: Valid }, ++ Range { from: '\u{ab0f}', to: '\u{ab10}', mapping: Disallowed }, ++ Range { from: '\u{ab11}', to: '\u{ab16}', mapping: Valid }, ++ Range { from: '\u{ab17}', to: '\u{ab1f}', mapping: Disallowed }, ++ Range { from: '\u{ab20}', to: '\u{ab26}', mapping: Valid }, ++ Range { from: '\u{ab27}', to: '\u{ab27}', mapping: Disallowed }, ++ Range { from: '\u{ab28}', to: '\u{ab2e}', mapping: Valid }, ++ Range { from: '\u{ab2f}', to: '\u{ab2f}', mapping: Disallowed }, ++ Range { from: '\u{ab30}', to: '\u{ab5b}', mapping: Valid }, ++ Range { from: '\u{ab5c}', to: '\u{ab5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 24, byte_len: 3 }) }, ++ Range { from: '\u{ab5d}', to: '\u{ab5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab5e}', to: '\u{ab5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 10, byte_len: 2 }) }, ++ Range { from: '\u{ab5f}', to: '\u{ab5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab60}', to: '\u{ab65}', mapping: Valid }, ++ Range { from: '\u{ab66}', to: '\u{ab6f}', mapping: Disallowed }, ++ Range { from: '\u{ab70}', to: '\u{ab70}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab71}', to: 
'\u{ab71}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab72}', to: '\u{ab72}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab73}', to: '\u{ab73}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab74}', to: '\u{ab74}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab75}', to: '\u{ab75}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab76}', to: '\u{ab76}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab77}', to: '\u{ab77}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab78}', to: '\u{ab78}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab79}', to: '\u{ab79}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab7a}', to: '\u{ab7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab7b}', to: '\u{ab7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab7c}', to: '\u{ab7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab7d}', to: '\u{ab7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab7e}', to: '\u{ab7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab7f}', to: '\u{ab7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 25, 
byte_len: 3 }) }, ++ Range { from: '\u{ab80}', to: '\u{ab80}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab81}', to: '\u{ab81}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab82}', to: '\u{ab82}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab83}', to: '\u{ab83}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab84}', to: '\u{ab84}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab85}', to: '\u{ab85}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab86}', to: '\u{ab86}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab87}', to: '\u{ab87}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab88}', to: '\u{ab88}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab89}', to: '\u{ab89}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab8a}', to: '\u{ab8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab8b}', to: '\u{ab8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab8c}', to: '\u{ab8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab8d}', to: '\u{ab8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab8e}', to: '\u{ab8e}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab8f}', to: '\u{ab8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab90}', to: '\u{ab90}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab91}', to: '\u{ab91}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab92}', to: '\u{ab92}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab93}', to: '\u{ab93}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab94}', to: '\u{ab94}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab95}', to: '\u{ab95}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab96}', to: '\u{ab96}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab97}', to: '\u{ab97}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab98}', to: '\u{ab98}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab99}', to: '\u{ab99}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab9a}', to: '\u{ab9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab9b}', to: '\u{ab9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab9c}', to: '\u{ab9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 25, byte_len: 3 }) 
}, ++ Range { from: '\u{ab9d}', to: '\u{ab9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab9e}', to: '\u{ab9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{ab9f}', to: '\u{ab9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{aba0}', to: '\u{aba0}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{aba1}', to: '\u{aba1}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{aba2}', to: '\u{aba2}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{aba3}', to: '\u{aba3}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{aba4}', to: '\u{aba4}', mapping: Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{aba5}', to: '\u{aba5}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{aba6}', to: '\u{aba6}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{aba7}', to: '\u{aba7}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{aba8}', to: '\u{aba8}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{aba9}', to: '\u{aba9}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{abaa}', to: '\u{abaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{abab}', to: '\u{abab}', mapping: Mapped(StringTableSlice 
{ byte_start_lo: 244, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{abac}', to: '\u{abac}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{abad}', to: '\u{abad}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{abae}', to: '\u{abae}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 25, byte_len: 3 }) }, ++ Range { from: '\u{abaf}', to: '\u{abaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abb0}', to: '\u{abb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abb1}', to: '\u{abb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abb2}', to: '\u{abb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abb3}', to: '\u{abb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abb4}', to: '\u{abb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abb5}', to: '\u{abb5}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abb6}', to: '\u{abb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abb7}', to: '\u{abb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abb8}', to: '\u{abb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abb9}', to: '\u{abb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abba}', to: 
'\u{abba}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abbb}', to: '\u{abbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abbc}', to: '\u{abbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abbd}', to: '\u{abbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abbe}', to: '\u{abbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abbf}', to: '\u{abbf}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{abc0}', to: '\u{abed}', mapping: Valid }, ++ Range { from: '\u{abee}', to: '\u{abef}', mapping: Disallowed }, ++ Range { from: '\u{abf0}', to: '\u{abf9}', mapping: Valid }, ++ Range { from: '\u{abfa}', to: '\u{abff}', mapping: Disallowed }, ++ Range { from: '\u{ac00}', to: '\u{d7a3}', mapping: Valid }, ++ Range { from: '\u{d7a4}', to: '\u{d7af}', mapping: Disallowed }, ++ Range { from: '\u{d7b0}', to: '\u{d7c6}', mapping: Valid }, ++ Range { from: '\u{d7c7}', to: '\u{d7ca}', mapping: Disallowed }, ++ Range { from: '\u{d7cb}', to: '\u{d7fb}', mapping: Valid }, ++ Range { from: '\u{d7fc}', to: '\u{f8ff}', mapping: Disallowed }, ++ Range { from: '\u{f900}', to: '\u{f900}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f901}', to: '\u{f901}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f902}', to: '\u{f902}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{f903}', to: '\u{f903}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: 
'\u{f904}', to: '\u{f904}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f905}', to: '\u{f905}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f906}', to: '\u{f906}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f907}', to: '\u{f908}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{f909}', to: '\u{f909}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f90a}', to: '\u{f90a}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{f90b}', to: '\u{f90b}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f90c}', to: '\u{f90c}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f90d}', to: '\u{f90d}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f90e}', to: '\u{f90e}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f90f}', to: '\u{f90f}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f910}', to: '\u{f910}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f911}', to: '\u{f911}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f912}', to: '\u{f912}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f913}', to: '\u{f913}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, 
byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f914}', to: '\u{f914}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f915}', to: '\u{f915}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f916}', to: '\u{f916}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f917}', to: '\u{f917}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f918}', to: '\u{f918}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f919}', to: '\u{f919}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f91a}', to: '\u{f91a}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f91b}', to: '\u{f91b}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f91c}', to: '\u{f91c}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f91d}', to: '\u{f91d}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f91e}', to: '\u{f91e}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f91f}', to: '\u{f91f}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f920}', to: '\u{f920}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f921}', to: '\u{f921}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f922}', to: '\u{f922}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f923}', to: '\u{f923}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f924}', to: '\u{f924}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f925}', to: '\u{f925}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f926}', to: '\u{f926}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f927}', to: '\u{f927}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f928}', to: '\u{f928}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f929}', to: '\u{f929}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f92a}', to: '\u{f92a}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f92b}', to: '\u{f92b}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f92c}', to: '\u{f92c}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f92d}', to: '\u{f92d}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f92e}', to: '\u{f92e}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f92f}', to: '\u{f92f}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f930}', to: '\u{f930}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 26, 
byte_len: 3 }) }, ++ Range { from: '\u{f931}', to: '\u{f931}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f932}', to: '\u{f932}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f933}', to: '\u{f933}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f934}', to: '\u{f934}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{f935}', to: '\u{f935}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f936}', to: '\u{f936}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f937}', to: '\u{f937}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f938}', to: '\u{f938}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f939}', to: '\u{f939}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f93a}', to: '\u{f93a}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f93b}', to: '\u{f93b}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f93c}', to: '\u{f93c}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f93d}', to: '\u{f93d}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f93e}', to: '\u{f93e}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f93f}', to: '\u{f93f}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f940}', to: '\u{f940}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{f941}', to: '\u{f941}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f942}', to: '\u{f942}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f943}', to: '\u{f943}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f944}', to: '\u{f944}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f945}', to: '\u{f945}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f946}', to: '\u{f946}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f947}', to: '\u{f947}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f948}', to: '\u{f948}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f949}', to: '\u{f949}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f94a}', to: '\u{f94a}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f94b}', to: '\u{f94b}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f94c}', to: '\u{f94c}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f94d}', to: '\u{f94d}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 27, byte_len: 3 }) }, ++ 
Range { from: '\u{f94e}', to: '\u{f94e}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f94f}', to: '\u{f94f}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f950}', to: '\u{f950}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f951}', to: '\u{f951}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f952}', to: '\u{f952}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f953}', to: '\u{f953}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f954}', to: '\u{f954}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f955}', to: '\u{f955}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f956}', to: '\u{f956}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f957}', to: '\u{f957}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f958}', to: '\u{f958}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f959}', to: '\u{f959}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f95a}', to: '\u{f95a}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f95b}', to: '\u{f95b}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f95c}', to: '\u{f95c}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, 
byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f95d}', to: '\u{f95d}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f95e}', to: '\u{f95e}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f95f}', to: '\u{f95f}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f960}', to: '\u{f960}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f961}', to: '\u{f961}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f962}', to: '\u{f962}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f963}', to: '\u{f963}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f964}', to: '\u{f964}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f965}', to: '\u{f965}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f966}', to: '\u{f966}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f967}', to: '\u{f967}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f968}', to: '\u{f968}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f969}', to: '\u{f969}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f96a}', to: '\u{f96a}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f96b}', to: '\u{f96b}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f96c}', to: '\u{f96c}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f96d}', to: '\u{f96d}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f96e}', to: '\u{f96e}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f96f}', to: '\u{f96f}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f970}', to: '\u{f970}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f971}', to: '\u{f971}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{f972}', to: '\u{f972}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f973}', to: '\u{f973}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f974}', to: '\u{f974}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f975}', to: '\u{f975}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f976}', to: '\u{f976}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f977}', to: '\u{f977}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f978}', to: '\u{f978}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f979}', to: '\u{f979}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 27, byte_len: 3 }) }, 
++ Range { from: '\u{f97a}', to: '\u{f97a}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f97b}', to: '\u{f97b}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f97c}', to: '\u{f97c}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f97d}', to: '\u{f97d}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f97e}', to: '\u{f97e}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f97f}', to: '\u{f97f}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f980}', to: '\u{f980}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f981}', to: '\u{f981}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{f982}', to: '\u{f982}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f983}', to: '\u{f983}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f984}', to: '\u{f984}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f985}', to: '\u{f985}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f986}', to: '\u{f986}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f987}', to: '\u{f987}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f988}', to: '\u{f988}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 176, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f989}', to: '\u{f989}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f98a}', to: '\u{f98a}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{f98b}', to: '\u{f98b}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f98c}', to: '\u{f98c}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f98d}', to: '\u{f98d}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f98e}', to: '\u{f98e}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f98f}', to: '\u{f98f}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f990}', to: '\u{f990}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f991}', to: '\u{f991}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f992}', to: '\u{f992}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f993}', to: '\u{f993}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f994}', to: '\u{f994}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f995}', to: '\u{f995}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f996}', to: '\u{f996}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: 
'\u{f997}', to: '\u{f997}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f998}', to: '\u{f998}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f999}', to: '\u{f999}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f99a}', to: '\u{f99a}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f99b}', to: '\u{f99b}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f99c}', to: '\u{f99c}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f99d}', to: '\u{f99d}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f99e}', to: '\u{f99e}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f99f}', to: '\u{f99f}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f9a0}', to: '\u{f9a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f9a1}', to: '\u{f9a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f9a2}', to: '\u{f9a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f9a3}', to: '\u{f9a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f9a4}', to: '\u{f9a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f9a5}', to: '\u{f9a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, 
byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9a6}', to: '\u{f9a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9a7}', to: '\u{f9a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9a8}', to: '\u{f9a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9a9}', to: '\u{f9a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9aa}', to: '\u{f9aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f9ab}', to: '\u{f9ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9ac}', to: '\u{f9ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9ad}', to: '\u{f9ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9ae}', to: '\u{f9ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9af}', to: '\u{f9af}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9b0}', to: '\u{f9b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9b1}', to: '\u{f9b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9b2}', to: '\u{f9b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9b3}', to: '\u{f9b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9b4}', to: '\u{f9b4}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9b5}', to: '\u{f9b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9b6}', to: '\u{f9b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9b7}', to: '\u{f9b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9b8}', to: '\u{f9b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9b9}', to: '\u{f9b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9ba}', to: '\u{f9ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9bb}', to: '\u{f9bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9bc}', to: '\u{f9bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9bd}', to: '\u{f9bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9be}', to: '\u{f9be}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9bf}', to: '\u{f9bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{f9c0}', to: '\u{f9c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9c1}', to: '\u{f9c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9c2}', to: '\u{f9c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { 
from: '\u{f9c3}', to: '\u{f9c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9c4}', to: '\u{f9c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{f9c5}', to: '\u{f9c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9c6}', to: '\u{f9c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 91, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9c7}', to: '\u{f9c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9c8}', to: '\u{f9c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 97, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9c9}', to: '\u{f9c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9ca}', to: '\u{f9ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9cb}', to: '\u{f9cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9cc}', to: '\u{f9cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9cd}', to: '\u{f9cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9ce}', to: '\u{f9ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9cf}', to: '\u{f9cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9d0}', to: '\u{f9d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9d1}', to: '\u{f9d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 
19, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{f9d2}', to: '\u{f9d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9d3}', to: '\u{f9d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9d4}', to: '\u{f9d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9d5}', to: '\u{f9d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9d6}', to: '\u{f9d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9d7}', to: '\u{f9d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9d8}', to: '\u{f9d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9d9}', to: '\u{f9d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9da}', to: '\u{f9da}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9db}', to: '\u{f9db}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{f9dc}', to: '\u{f9dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9dd}', to: '\u{f9dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9de}', to: '\u{f9de}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9df}', to: '\u{f9df}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9e0}', to: 
'\u{f9e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9e1}', to: '\u{f9e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9e2}', to: '\u{f9e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9e3}', to: '\u{f9e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9e4}', to: '\u{f9e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9e5}', to: '\u{f9e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9e6}', to: '\u{f9e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9e7}', to: '\u{f9e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9e8}', to: '\u{f9e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9e9}', to: '\u{f9e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{f9ea}', to: '\u{f9ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9eb}', to: '\u{f9eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9ec}', to: '\u{f9ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9ed}', to: '\u{f9ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9ee}', to: '\u{f9ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 
28, byte_len: 3 }) }, ++ Range { from: '\u{f9ef}', to: '\u{f9ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9f0}', to: '\u{f9f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9f1}', to: '\u{f9f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9f2}', to: '\u{f9f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9f3}', to: '\u{f9f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9f4}', to: '\u{f9f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9f5}', to: '\u{f9f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9f6}', to: '\u{f9f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9f7}', to: '\u{f9f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{f9f8}', to: '\u{f9f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9f9}', to: '\u{f9f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9fa}', to: '\u{f9fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9fb}', to: '\u{f9fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9fc}', to: '\u{f9fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9fd}', to: '\u{f9fd}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9fe}', to: '\u{f9fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{f9ff}', to: '\u{f9ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{fa00}', to: '\u{fa00}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{fa01}', to: '\u{fa01}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa02}', to: '\u{fa02}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa03}', to: '\u{fa03}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa04}', to: '\u{fa04}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa05}', to: '\u{fa05}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa06}', to: '\u{fa06}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa07}', to: '\u{fa07}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa08}', to: '\u{fa08}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{fa09}', to: '\u{fa09}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa0a}', to: '\u{fa0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{fa0b}', to: '\u{fa0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { 
from: '\u{fa0c}', to: '\u{fa0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa0d}', to: '\u{fa0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa0e}', to: '\u{fa0f}', mapping: Valid }, ++ Range { from: '\u{fa10}', to: '\u{fa10}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa11}', to: '\u{fa11}', mapping: Valid }, ++ Range { from: '\u{fa12}', to: '\u{fa12}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa13}', to: '\u{fa14}', mapping: Valid }, ++ Range { from: '\u{fa15}', to: '\u{fa15}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa16}', to: '\u{fa16}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa17}', to: '\u{fa17}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa18}', to: '\u{fa18}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa19}', to: '\u{fa19}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa1a}', to: '\u{fa1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa1b}', to: '\u{fa1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa1c}', to: '\u{fa1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa1d}', to: '\u{fa1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa1e}', to: '\u{fa1e}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{fa1f}', to: '\u{fa1f}', mapping: Valid }, ++ Range { from: '\u{fa20}', to: '\u{fa20}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa21}', to: '\u{fa21}', mapping: Valid }, ++ Range { from: '\u{fa22}', to: '\u{fa22}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa23}', to: '\u{fa24}', mapping: Valid }, ++ Range { from: '\u{fa25}', to: '\u{fa25}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa26}', to: '\u{fa26}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa27}', to: '\u{fa29}', mapping: Valid }, ++ Range { from: '\u{fa2a}', to: '\u{fa2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa2b}', to: '\u{fa2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa2c}', to: '\u{fa2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa2d}', to: '\u{fa2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa2e}', to: '\u{fa2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa2f}', to: '\u{fa2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa30}', to: '\u{fa30}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa31}', to: '\u{fa31}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa32}', to: 
'\u{fa32}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa33}', to: '\u{fa33}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa34}', to: '\u{fa34}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa35}', to: '\u{fa35}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa36}', to: '\u{fa36}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa37}', to: '\u{fa37}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa38}', to: '\u{fa38}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa39}', to: '\u{fa39}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa3a}', to: '\u{fa3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa3b}', to: '\u{fa3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa3c}', to: '\u{fa3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{fa3d}', to: '\u{fa3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa3e}', to: '\u{fa3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa3f}', to: '\u{fa3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa40}', to: '\u{fa40}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 
29, byte_len: 3 }) }, ++ Range { from: '\u{fa41}', to: '\u{fa41}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa42}', to: '\u{fa42}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa43}', to: '\u{fa43}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa44}', to: '\u{fa44}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa45}', to: '\u{fa45}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa46}', to: '\u{fa46}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa47}', to: '\u{fa47}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa48}', to: '\u{fa48}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa49}', to: '\u{fa49}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa4a}', to: '\u{fa4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa4b}', to: '\u{fa4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa4c}', to: '\u{fa4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{fa4d}', to: '\u{fa4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa4e}', to: '\u{fa4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa4f}', to: '\u{fa4f}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa50}', to: '\u{fa50}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa51}', to: '\u{fa51}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{fa52}', to: '\u{fa52}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa53}', to: '\u{fa53}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa54}', to: '\u{fa54}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa55}', to: '\u{fa55}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa56}', to: '\u{fa56}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa57}', to: '\u{fa57}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{fa58}', to: '\u{fa58}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa59}', to: '\u{fa59}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa5a}', to: '\u{fa5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa5b}', to: '\u{fa5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa5c}', to: '\u{fa5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa5d}', to: '\u{fa5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 29, byte_len: 3 }) }, 
++ Range { from: '\u{fa5f}', to: '\u{fa5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa60}', to: '\u{fa60}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa61}', to: '\u{fa61}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa62}', to: '\u{fa62}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa63}', to: '\u{fa63}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa64}', to: '\u{fa64}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa65}', to: '\u{fa65}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa66}', to: '\u{fa66}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa67}', to: '\u{fa67}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa68}', to: '\u{fa68}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa69}', to: '\u{fa69}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa6a}', to: '\u{fa6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa6b}', to: '\u{fa6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa6c}', to: '\u{fa6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 30, byte_len: 4 }) }, ++ Range { from: '\u{fa6d}', to: '\u{fa6d}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 6, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa6e}', to: '\u{fa6f}', mapping: Disallowed }, ++ Range { from: '\u{fa70}', to: '\u{fa70}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa71}', to: '\u{fa71}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa72}', to: '\u{fa72}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa73}', to: '\u{fa73}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa74}', to: '\u{fa74}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa75}', to: '\u{fa75}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa76}', to: '\u{fa76}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa77}', to: '\u{fa77}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa78}', to: '\u{fa78}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa79}', to: '\u{fa79}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa7a}', to: '\u{fa7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa7b}', to: '\u{fa7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa7c}', to: '\u{fa7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa7d}', to: '\u{fa7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, 
byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa7e}', to: '\u{fa7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa7f}', to: '\u{fa7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa80}', to: '\u{fa80}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa81}', to: '\u{fa81}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa82}', to: '\u{fa82}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa83}', to: '\u{fa83}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa84}', to: '\u{fa84}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa85}', to: '\u{fa85}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa86}', to: '\u{fa86}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa87}', to: '\u{fa87}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa88}', to: '\u{fa88}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa89}', to: '\u{fa89}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa8a}', to: '\u{fa8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa8b}', to: '\u{fa8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa8c}', to: '\u{fa8c}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa8d}', to: '\u{fa8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa8e}', to: '\u{fa8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa8f}', to: '\u{fa8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa90}', to: '\u{fa90}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa91}', to: '\u{fa91}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa92}', to: '\u{fa92}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{fa93}', to: '\u{fa93}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa94}', to: '\u{fa94}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa95}', to: '\u{fa95}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{fa96}', to: '\u{fa96}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{fa97}', to: '\u{fa97}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{fa98}', to: '\u{fa98}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa99}', to: '\u{fa99}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa9a}', to: '\u{fa9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 29, byte_len: 3 }) }, ++ 
Range { from: '\u{fa9b}', to: '\u{fa9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa9c}', to: '\u{fa9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fa9d}', to: '\u{fa9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa9e}', to: '\u{fa9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fa9f}', to: '\u{fa9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faa0}', to: '\u{faa0}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{faa1}', to: '\u{faa1}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faa2}', to: '\u{faa2}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faa3}', to: '\u{faa3}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faa4}', to: '\u{faa4}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faa5}', to: '\u{faa5}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faa6}', to: '\u{faa6}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{faa7}', to: '\u{faa7}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faa8}', to: '\u{faa8}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faa9}', to: '\u{faa9}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 141, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faaa}', to: '\u{faaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faab}', to: '\u{faab}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faac}', to: '\u{faac}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faad}', to: '\u{faad}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{faae}', to: '\u{faae}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faaf}', to: '\u{faaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fab0}', to: '\u{fab0}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{fab1}', to: '\u{fab1}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fab2}', to: '\u{fab2}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fab3}', to: '\u{fab3}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fab4}', to: '\u{fab4}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fab5}', to: '\u{fab5}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fab6}', to: '\u{fab6}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fab7}', to: '\u{fab7}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: 
'\u{fab8}', to: '\u{fab8}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fab9}', to: '\u{fab9}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faba}', to: '\u{faba}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fabb}', to: '\u{fabb}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fabc}', to: '\u{fabc}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fabd}', to: '\u{fabd}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{fabe}', to: '\u{fabe}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fabf}', to: '\u{fabf}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fac0}', to: '\u{fac0}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fac1}', to: '\u{fac1}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fac2}', to: '\u{fac2}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fac3}', to: '\u{fac3}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fac4}', to: '\u{fac4}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fac5}', to: '\u{fac5}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fac6}', to: '\u{fac6}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, 
byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fac7}', to: '\u{fac7}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fac8}', to: '\u{fac8}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{fac9}', to: '\u{fac9}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{faca}', to: '\u{faca}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{facb}', to: '\u{facb}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{facc}', to: '\u{facc}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{facd}', to: '\u{facd}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{face}', to: '\u{face}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{facf}', to: '\u{facf}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 30, byte_len: 4 }) }, ++ Range { from: '\u{fad0}', to: '\u{fad0}', mapping: Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 30, byte_len: 4 }) }, ++ Range { from: '\u{fad1}', to: '\u{fad1}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 30, byte_len: 4 }) }, ++ Range { from: '\u{fad2}', to: '\u{fad2}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fad3}', to: '\u{fad3}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fad4}', to: '\u{fad4}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fad5}', to: '\u{fad5}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 30, byte_len: 4 }) }, ++ Range { from: '\u{fad6}', to: '\u{fad6}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 30, byte_len: 4 }) }, ++ Range { from: '\u{fad7}', to: '\u{fad7}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 30, byte_len: 4 }) }, ++ Range { from: '\u{fad8}', to: '\u{fad8}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fad9}', to: '\u{fad9}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{fada}', to: '\u{faff}', mapping: Disallowed }, ++ Range { from: '\u{fb00}', to: '\u{fb00}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 30, byte_len: 2 }) }, ++ Range { from: '\u{fb01}', to: '\u{fb01}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 30, byte_len: 2 }) }, ++ Range { from: '\u{fb02}', to: '\u{fb02}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb03}', to: '\u{fb03}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 31, byte_len: 3 }) }, ++ Range { from: '\u{fb04}', to: '\u{fb04}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 31, byte_len: 3 }) }, ++ Range { from: '\u{fb05}', to: '\u{fb06}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb07}', to: '\u{fb12}', mapping: Disallowed }, ++ Range { from: '\u{fb13}', to: '\u{fb13}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb14}', to: '\u{fb14}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb15}', to: '\u{fb15}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 31, byte_len: 4 }) }, 
++ Range { from: '\u{fb16}', to: '\u{fb16}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb17}', to: '\u{fb17}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb18}', to: '\u{fb1c}', mapping: Disallowed }, ++ Range { from: '\u{fb1d}', to: '\u{fb1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb1e}', to: '\u{fb1e}', mapping: Valid }, ++ Range { from: '\u{fb1f}', to: '\u{fb1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb20}', to: '\u{fb20}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb21}', to: '\u{fb21}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{fb22}', to: '\u{fb22}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 8, byte_len: 2 }) }, ++ Range { from: '\u{fb23}', to: '\u{fb23}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb24}', to: '\u{fb24}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb25}', to: '\u{fb25}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb26}', to: '\u{fb26}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb27}', to: '\u{fb27}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb28}', to: '\u{fb28}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb29}', to: '\u{fb29}', mapping: DisallowedStd3Mapped(StringTableSlice { 
byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{fb2a}', to: '\u{fb2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb2b}', to: '\u{fb2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb2c}', to: '\u{fb2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 31, byte_len: 6 }) }, ++ Range { from: '\u{fb2d}', to: '\u{fb2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 31, byte_len: 6 }) }, ++ Range { from: '\u{fb2e}', to: '\u{fb2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb2f}', to: '\u{fb2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb30}', to: '\u{fb30}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb31}', to: '\u{fb31}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb32}', to: '\u{fb32}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb33}', to: '\u{fb33}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb34}', to: '\u{fb34}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb35}', to: '\u{fb35}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb36}', to: '\u{fb36}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb37}', to: '\u{fb37}', mapping: Disallowed }, ++ Range { from: '\u{fb38}', to: '\u{fb38}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, 
byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb39}', to: '\u{fb39}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb3a}', to: '\u{fb3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb3b}', to: '\u{fb3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb3c}', to: '\u{fb3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb3d}', to: '\u{fb3d}', mapping: Disallowed }, ++ Range { from: '\u{fb3e}', to: '\u{fb3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb3f}', to: '\u{fb3f}', mapping: Disallowed }, ++ Range { from: '\u{fb40}', to: '\u{fb40}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb41}', to: '\u{fb41}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb42}', to: '\u{fb42}', mapping: Disallowed }, ++ Range { from: '\u{fb43}', to: '\u{fb43}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb44}', to: '\u{fb44}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb45}', to: '\u{fb45}', mapping: Disallowed }, ++ Range { from: '\u{fb46}', to: '\u{fb46}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb47}', to: '\u{fb47}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb48}', to: '\u{fb48}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb49}', to: '\u{fb49}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb4a}', to: '\u{fb4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb4b}', to: '\u{fb4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb4c}', to: '\u{fb4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb4d}', to: '\u{fb4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb4e}', to: '\u{fb4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb4f}', to: '\u{fb4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 31, byte_len: 4 }) }, ++ Range { from: '\u{fb50}', to: '\u{fb51}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb52}', to: '\u{fb55}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb56}', to: '\u{fb59}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb5a}', to: '\u{fb5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb5e}', to: '\u{fb61}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb62}', to: '\u{fb65}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb66}', to: '\u{fb69}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb6a}', to: '\u{fb6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 31, 
byte_len: 2 }) }, ++ Range { from: '\u{fb6e}', to: '\u{fb71}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb72}', to: '\u{fb75}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb76}', to: '\u{fb79}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb7a}', to: '\u{fb7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb7e}', to: '\u{fb81}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb82}', to: '\u{fb83}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb84}', to: '\u{fb85}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb86}', to: '\u{fb87}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb88}', to: '\u{fb89}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb8a}', to: '\u{fb8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb8c}', to: '\u{fb8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb8e}', to: '\u{fb91}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb92}', to: '\u{fb95}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb96}', to: '\u{fb99}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb9a}', to: '\u{fb9d}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fb9e}', to: '\u{fb9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fba0}', to: '\u{fba3}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fba4}', to: '\u{fba5}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fba6}', to: '\u{fba9}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fbaa}', to: '\u{fbad}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fbae}', to: '\u{fbaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fbb0}', to: '\u{fbb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fbb2}', to: '\u{fbc1}', mapping: Valid }, ++ Range { from: '\u{fbc2}', to: '\u{fbd2}', mapping: Disallowed }, ++ Range { from: '\u{fbd3}', to: '\u{fbd6}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fbd7}', to: '\u{fbd8}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fbd9}', to: '\u{fbda}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fbdb}', to: '\u{fbdc}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{fbdd}', to: '\u{fbdd}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 3, byte_len: 4 }) }, ++ Range { from: '\u{fbde}', to: '\u{fbdf}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 32, byte_len: 2 }) }, ++ 
Range { from: '\u{fbe0}', to: '\u{fbe1}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 32, byte_len: 2 }) }, ++ Range { from: '\u{fbe2}', to: '\u{fbe3}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 32, byte_len: 2 }) }, ++ Range { from: '\u{fbe4}', to: '\u{fbe7}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 32, byte_len: 2 }) }, ++ Range { from: '\u{fbe8}', to: '\u{fbe9}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 32, byte_len: 2 }) }, ++ Range { from: '\u{fbea}', to: '\u{fbeb}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fbec}', to: '\u{fbed}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fbee}', to: '\u{fbef}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fbf0}', to: '\u{fbf1}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fbf2}', to: '\u{fbf3}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fbf4}', to: '\u{fbf5}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fbf6}', to: '\u{fbf8}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fbf9}', to: '\u{fbfb}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fbfc}', to: '\u{fbff}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 32, byte_len: 2 }) }, ++ Range { from: '\u{fc00}', to: '\u{fc00}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc01}', to: '\u{fc01}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, 
byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc02}', to: '\u{fc02}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc03}', to: '\u{fc03}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc04}', to: '\u{fc04}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc05}', to: '\u{fc05}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc06}', to: '\u{fc06}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc07}', to: '\u{fc07}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc08}', to: '\u{fc08}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc09}', to: '\u{fc09}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc0a}', to: '\u{fc0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc0b}', to: '\u{fc0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc0c}', to: '\u{fc0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc0d}', to: '\u{fc0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc0e}', to: '\u{fc0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc0f}', to: '\u{fc0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc10}', to: '\u{fc10}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc11}', to: '\u{fc11}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc12}', to: '\u{fc12}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc13}', to: '\u{fc13}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc14}', to: '\u{fc14}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc15}', to: '\u{fc15}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc16}', to: '\u{fc16}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc17}', to: '\u{fc17}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc18}', to: '\u{fc18}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc19}', to: '\u{fc19}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc1a}', to: '\u{fc1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc1b}', to: '\u{fc1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc1c}', to: '\u{fc1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc1d}', to: '\u{fc1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc1e}', to: '\u{fc1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 32, byte_len: 4 }) 
}, ++ Range { from: '\u{fc1f}', to: '\u{fc1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc20}', to: '\u{fc20}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc21}', to: '\u{fc21}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc22}', to: '\u{fc22}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc23}', to: '\u{fc23}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc24}', to: '\u{fc24}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc25}', to: '\u{fc25}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc26}', to: '\u{fc26}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc27}', to: '\u{fc27}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc28}', to: '\u{fc28}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc29}', to: '\u{fc29}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc2a}', to: '\u{fc2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc2b}', to: '\u{fc2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc2c}', to: '\u{fc2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc2d}', to: '\u{fc2d}', mapping: Mapped(StringTableSlice 
{ byte_start_lo: 220, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc2e}', to: '\u{fc2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc2f}', to: '\u{fc2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc30}', to: '\u{fc30}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc31}', to: '\u{fc31}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc32}', to: '\u{fc32}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc33}', to: '\u{fc33}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc34}', to: '\u{fc34}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc35}', to: '\u{fc35}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc36}', to: '\u{fc36}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc37}', to: '\u{fc37}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc38}', to: '\u{fc38}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc39}', to: '\u{fc39}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc3a}', to: '\u{fc3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc3b}', to: '\u{fc3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc3c}', 
to: '\u{fc3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc3d}', to: '\u{fc3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc3e}', to: '\u{fc3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc3f}', to: '\u{fc3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc40}', to: '\u{fc40}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc41}', to: '\u{fc41}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc42}', to: '\u{fc42}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc43}', to: '\u{fc43}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc44}', to: '\u{fc44}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc45}', to: '\u{fc45}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc46}', to: '\u{fc46}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc47}', to: '\u{fc47}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc48}', to: '\u{fc48}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc49}', to: '\u{fc49}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc4a}', to: '\u{fc4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 33, 
byte_len: 4 }) }, ++ Range { from: '\u{fc4b}', to: '\u{fc4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc4c}', to: '\u{fc4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc4d}', to: '\u{fc4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc4e}', to: '\u{fc4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc4f}', to: '\u{fc4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc50}', to: '\u{fc50}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc51}', to: '\u{fc51}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc52}', to: '\u{fc52}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc53}', to: '\u{fc53}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc54}', to: '\u{fc54}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc55}', to: '\u{fc55}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc56}', to: '\u{fc56}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc57}', to: '\u{fc57}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc58}', to: '\u{fc58}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc59}', to: '\u{fc59}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc5a}', to: '\u{fc5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc5b}', to: '\u{fc5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc5c}', to: '\u{fc5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc5d}', to: '\u{fc5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc5e}', to: '\u{fc5e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 33, byte_len: 5 }) }, ++ Range { from: '\u{fc5f}', to: '\u{fc5f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 33, byte_len: 5 }) }, ++ Range { from: '\u{fc60}', to: '\u{fc60}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 33, byte_len: 5 }) }, ++ Range { from: '\u{fc61}', to: '\u{fc61}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 33, byte_len: 5 }) }, ++ Range { from: '\u{fc62}', to: '\u{fc62}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 33, byte_len: 5 }) }, ++ Range { from: '\u{fc63}', to: '\u{fc63}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 33, byte_len: 5 }) }, ++ Range { from: '\u{fc64}', to: '\u{fc64}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc65}', to: '\u{fc65}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc66}', to: '\u{fc66}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc67}', to: '\u{fc67}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc68}', to: '\u{fc68}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc69}', to: '\u{fc69}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc6a}', to: '\u{fc6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc6b}', to: '\u{fc6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc6c}', to: '\u{fc6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc6d}', to: '\u{fc6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc6e}', to: '\u{fc6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc6f}', to: '\u{fc6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc70}', to: '\u{fc70}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc71}', to: '\u{fc71}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc72}', to: '\u{fc72}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc73}', to: '\u{fc73}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc74}', to: '\u{fc74}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc75}', to: '\u{fc75}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 32, byte_len: 4 
}) }, ++ Range { from: '\u{fc76}', to: '\u{fc76}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc77}', to: '\u{fc77}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc78}', to: '\u{fc78}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc79}', to: '\u{fc79}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc7a}', to: '\u{fc7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc7b}', to: '\u{fc7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc7c}', to: '\u{fc7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc7d}', to: '\u{fc7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc7e}', to: '\u{fc7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc7f}', to: '\u{fc7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc80}', to: '\u{fc80}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc81}', to: '\u{fc81}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc82}', to: '\u{fc82}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc83}', to: '\u{fc83}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc84}', to: '\u{fc84}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 32, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc85}', to: '\u{fc85}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc86}', to: '\u{fc86}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc87}', to: '\u{fc87}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc88}', to: '\u{fc88}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc89}', to: '\u{fc89}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc8a}', to: '\u{fc8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc8b}', to: '\u{fc8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc8c}', to: '\u{fc8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc8d}', to: '\u{fc8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc8e}', to: '\u{fc8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc8f}', to: '\u{fc8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc90}', to: '\u{fc90}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc91}', to: '\u{fc91}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc92}', to: '\u{fc92}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fc93}', to: 
'\u{fc93}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc94}', to: '\u{fc94}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fc95}', to: '\u{fc95}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc96}', to: '\u{fc96}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fc97}', to: '\u{fc97}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc98}', to: '\u{fc98}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc99}', to: '\u{fc99}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fc9a}', to: '\u{fc9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc9b}', to: '\u{fc9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fc9c}', to: '\u{fc9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc9d}', to: '\u{fc9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc9e}', to: '\u{fc9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fc9f}', to: '\u{fc9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fca0}', to: '\u{fca0}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fca1}', to: '\u{fca1}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 32, 
byte_len: 4 }) }, ++ Range { from: '\u{fca2}', to: '\u{fca2}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fca3}', to: '\u{fca3}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fca4}', to: '\u{fca4}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fca5}', to: '\u{fca5}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fca6}', to: '\u{fca6}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fca7}', to: '\u{fca7}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fca8}', to: '\u{fca8}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fca9}', to: '\u{fca9}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcaa}', to: '\u{fcaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcab}', to: '\u{fcab}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcac}', to: '\u{fcac}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcad}', to: '\u{fcad}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcae}', to: '\u{fcae}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcaf}', to: '\u{fcaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcb0}', to: '\u{fcb0}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcb1}', to: '\u{fcb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcb2}', to: '\u{fcb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcb3}', to: '\u{fcb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcb4}', to: '\u{fcb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcb5}', to: '\u{fcb5}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcb6}', to: '\u{fcb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcb7}', to: '\u{fcb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcb8}', to: '\u{fcb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcb9}', to: '\u{fcb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcba}', to: '\u{fcba}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcbb}', to: '\u{fcbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcbc}', to: '\u{fcbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcbd}', to: '\u{fcbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcbe}', to: '\u{fcbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 32, byte_len: 4 }) }, 
++ Range { from: '\u{fcbf}', to: '\u{fcbf}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcc0}', to: '\u{fcc0}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcc1}', to: '\u{fcc1}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcc2}', to: '\u{fcc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcc3}', to: '\u{fcc3}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fcc4}', to: '\u{fcc4}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcc5}', to: '\u{fcc5}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcc6}', to: '\u{fcc6}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcc7}', to: '\u{fcc7}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcc8}', to: '\u{fcc8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcc9}', to: '\u{fcc9}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcca}', to: '\u{fcca}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fccb}', to: '\u{fccb}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fccc}', to: '\u{fccc}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fccd}', to: '\u{fccd}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 30, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcce}', to: '\u{fcce}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fccf}', to: '\u{fccf}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcd0}', to: '\u{fcd0}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcd1}', to: '\u{fcd1}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcd2}', to: '\u{fcd2}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcd3}', to: '\u{fcd3}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcd4}', to: '\u{fcd4}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcd5}', to: '\u{fcd5}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcd6}', to: '\u{fcd6}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcd7}', to: '\u{fcd7}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcd8}', to: '\u{fcd8}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcd9}', to: '\u{fcd9}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcda}', to: '\u{fcda}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcdb}', to: '\u{fcdb}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcdc}', to: 
'\u{fcdc}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcdd}', to: '\u{fcdd}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcde}', to: '\u{fcde}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcdf}', to: '\u{fcdf}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fce0}', to: '\u{fce0}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fce1}', to: '\u{fce1}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fce2}', to: '\u{fce2}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fce3}', to: '\u{fce3}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fce4}', to: '\u{fce4}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fce5}', to: '\u{fce5}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fce6}', to: '\u{fce6}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fce7}', to: '\u{fce7}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fce8}', to: '\u{fce8}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fce9}', to: '\u{fce9}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcea}', to: '\u{fcea}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 34, 
byte_len: 4 }) }, ++ Range { from: '\u{fceb}', to: '\u{fceb}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcec}', to: '\u{fcec}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fced}', to: '\u{fced}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcee}', to: '\u{fcee}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcef}', to: '\u{fcef}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcf0}', to: '\u{fcf0}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 33, byte_len: 4 }) }, ++ Range { from: '\u{fcf1}', to: '\u{fcf1}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcf2}', to: '\u{fcf2}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fcf3}', to: '\u{fcf3}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fcf4}', to: '\u{fcf4}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fcf5}', to: '\u{fcf5}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcf6}', to: '\u{fcf6}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcf7}', to: '\u{fcf7}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcf8}', to: '\u{fcf8}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcf9}', to: '\u{fcf9}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcfa}', to: '\u{fcfa}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcfb}', to: '\u{fcfb}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcfc}', to: '\u{fcfc}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcfd}', to: '\u{fcfd}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcfe}', to: '\u{fcfe}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fcff}', to: '\u{fcff}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd00}', to: '\u{fd00}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd01}', to: '\u{fd01}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd02}', to: '\u{fd02}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd03}', to: '\u{fd03}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd04}', to: '\u{fd04}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd05}', to: '\u{fd05}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd06}', to: '\u{fd06}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd07}', to: '\u{fd07}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 34, byte_len: 4 }) }, 
++ Range { from: '\u{fd08}', to: '\u{fd08}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd09}', to: '\u{fd09}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd0a}', to: '\u{fd0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd0b}', to: '\u{fd0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd0c}', to: '\u{fd0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd0d}', to: '\u{fd0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd0e}', to: '\u{fd0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd0f}', to: '\u{fd0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd10}', to: '\u{fd10}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd11}', to: '\u{fd11}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd12}', to: '\u{fd12}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd13}', to: '\u{fd13}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd14}', to: '\u{fd14}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd15}', to: '\u{fd15}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd16}', to: '\u{fd16}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 100, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd17}', to: '\u{fd17}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd18}', to: '\u{fd18}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd19}', to: '\u{fd19}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd1a}', to: '\u{fd1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd1b}', to: '\u{fd1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd1c}', to: '\u{fd1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd1d}', to: '\u{fd1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd1e}', to: '\u{fd1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd1f}', to: '\u{fd1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd20}', to: '\u{fd20}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd21}', to: '\u{fd21}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd22}', to: '\u{fd22}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd23}', to: '\u{fd23}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd24}', to: '\u{fd24}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: 
'\u{fd25}', to: '\u{fd25}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd26}', to: '\u{fd26}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd27}', to: '\u{fd27}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd28}', to: '\u{fd28}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd29}', to: '\u{fd29}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd2a}', to: '\u{fd2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd2b}', to: '\u{fd2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd2c}', to: '\u{fd2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd2d}', to: '\u{fd2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd2e}', to: '\u{fd2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd2f}', to: '\u{fd2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd30}', to: '\u{fd30}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd31}', to: '\u{fd31}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd32}', to: '\u{fd32}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd33}', to: '\u{fd33}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, 
byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fd34}', to: '\u{fd34}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fd35}', to: '\u{fd35}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fd36}', to: '\u{fd36}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fd37}', to: '\u{fd37}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd38}', to: '\u{fd38}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd39}', to: '\u{fd39}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd3a}', to: '\u{fd3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fd3b}', to: '\u{fd3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 32, byte_len: 4 }) }, ++ Range { from: '\u{fd3c}', to: '\u{fd3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 34, byte_len: 4 }) }, ++ Range { from: '\u{fd3e}', to: '\u{fd3f}', mapping: Valid }, ++ Range { from: '\u{fd40}', to: '\u{fd4f}', mapping: Disallowed }, ++ Range { from: '\u{fd50}', to: '\u{fd50}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fd51}', to: '\u{fd52}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fd53}', to: '\u{fd53}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fd54}', to: '\u{fd54}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fd55}', to: '\u{fd55}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fd56}', to: '\u{fd56}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fd57}', to: '\u{fd57}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fd58}', to: '\u{fd59}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fd5a}', to: '\u{fd5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fd5b}', to: '\u{fd5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fd5c}', to: '\u{fd5c}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 34, byte_len: 6 }) }, ++ Range { from: '\u{fd5d}', to: '\u{fd5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd5e}', to: '\u{fd5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd5f}', to: '\u{fd60}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd61}', to: '\u{fd61}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd62}', to: '\u{fd63}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd64}', to: '\u{fd65}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd66}', to: '\u{fd66}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd67}', to: '\u{fd68}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 35, byte_len: 6 }) }, 
++ Range { from: '\u{fd69}', to: '\u{fd69}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd6a}', to: '\u{fd6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd6c}', to: '\u{fd6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd6e}', to: '\u{fd6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd6f}', to: '\u{fd70}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd71}', to: '\u{fd72}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd73}', to: '\u{fd73}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd74}', to: '\u{fd74}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd75}', to: '\u{fd75}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd76}', to: '\u{fd77}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd78}', to: '\u{fd78}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd79}', to: '\u{fd79}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd7a}', to: '\u{fd7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd7b}', to: '\u{fd7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd7c}', to: '\u{fd7d}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 134, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd7e}', to: '\u{fd7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd7f}', to: '\u{fd7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd80}', to: '\u{fd80}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd81}', to: '\u{fd81}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd82}', to: '\u{fd82}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd83}', to: '\u{fd84}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd85}', to: '\u{fd86}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd87}', to: '\u{fd88}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd89}', to: '\u{fd89}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd8a}', to: '\u{fd8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd8b}', to: '\u{fd8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd8c}', to: '\u{fd8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd8d}', to: '\u{fd8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd8e}', to: '\u{fd8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: 
'\u{fd8f}', to: '\u{fd8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd90}', to: '\u{fd91}', mapping: Disallowed }, ++ Range { from: '\u{fd92}', to: '\u{fd92}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd93}', to: '\u{fd93}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd94}', to: '\u{fd94}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd95}', to: '\u{fd95}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd96}', to: '\u{fd96}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fd97}', to: '\u{fd98}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fd99}', to: '\u{fd99}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fd9a}', to: '\u{fd9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fd9b}', to: '\u{fd9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fd9c}', to: '\u{fd9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fd9e}', to: '\u{fd9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fd9f}', to: '\u{fd9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fda0}', to: '\u{fda0}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fda1}', to: 
'\u{fda1}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fda2}', to: '\u{fda2}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fda3}', to: '\u{fda3}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fda4}', to: '\u{fda4}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fda5}', to: '\u{fda5}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fda6}', to: '\u{fda6}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fda7}', to: '\u{fda7}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fda8}', to: '\u{fda8}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fda9}', to: '\u{fda9}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdaa}', to: '\u{fdaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdab}', to: '\u{fdab}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdac}', to: '\u{fdac}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdad}', to: '\u{fdad}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdae}', to: '\u{fdae}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdaf}', to: '\u{fdaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 36, 
byte_len: 6 }) }, ++ Range { from: '\u{fdb0}', to: '\u{fdb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdb1}', to: '\u{fdb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdb2}', to: '\u{fdb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdb3}', to: '\u{fdb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdb4}', to: '\u{fdb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fdb5}', to: '\u{fdb5}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fdb6}', to: '\u{fdb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdb7}', to: '\u{fdb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdb8}', to: '\u{fdb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdb9}', to: '\u{fdb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdba}', to: '\u{fdba}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdbb}', to: '\u{fdbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdbc}', to: '\u{fdbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdbd}', to: '\u{fdbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdbe}', to: '\u{fdbe}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdbf}', to: '\u{fdbf}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdc0}', to: '\u{fdc0}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdc1}', to: '\u{fdc1}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdc2}', to: '\u{fdc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdc3}', to: '\u{fdc3}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdc4}', to: '\u{fdc4}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fdc5}', to: '\u{fdc5}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 35, byte_len: 6 }) }, ++ Range { from: '\u{fdc6}', to: '\u{fdc6}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdc7}', to: '\u{fdc7}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdc8}', to: '\u{fdef}', mapping: Disallowed }, ++ Range { from: '\u{fdf0}', to: '\u{fdf0}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdf1}', to: '\u{fdf1}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 36, byte_len: 6 }) }, ++ Range { from: '\u{fdf2}', to: '\u{fdf2}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 37, byte_len: 8 }) }, ++ Range { from: '\u{fdf3}', to: '\u{fdf3}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 37, byte_len: 8 }) }, ++ Range { from: '\u{fdf4}', to: '\u{fdf4}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 37, byte_len: 8 }) }, ++ Range { from: '\u{fdf5}', to: '\u{fdf5}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 37, byte_len: 8 }) }, ++ Range { from: '\u{fdf6}', to: '\u{fdf6}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 37, byte_len: 8 }) }, ++ Range { from: '\u{fdf7}', to: '\u{fdf7}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 37, byte_len: 8 }) }, ++ Range { from: '\u{fdf8}', to: '\u{fdf8}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 37, byte_len: 8 }) }, ++ Range { from: '\u{fdf9}', to: '\u{fdf9}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 37, byte_len: 6 }) }, ++ Range { from: '\u{fdfa}', to: '\u{fdfa}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 37, byte_len: 33 }) }, ++ Range { from: '\u{fdfb}', to: '\u{fdfb}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 37, byte_len: 15 }) }, ++ Range { from: '\u{fdfc}', to: '\u{fdfc}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 37, byte_len: 8 }) }, ++ Range { from: '\u{fdfd}', to: '\u{fdfd}', mapping: Valid }, ++ Range { from: '\u{fdfe}', to: '\u{fdff}', mapping: Disallowed }, ++ Range { from: '\u{fe00}', to: '\u{fe0f}', mapping: Ignored }, ++ Range { from: '\u{fe10}', to: '\u{fe10}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe11}', to: '\u{fe11}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe12}', to: '\u{fe12}', mapping: Disallowed }, ++ Range { from: '\u{fe13}', to: '\u{fe13}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe14}', to: '\u{fe14}', mapping: DisallowedStd3Mapped(StringTableSlice { 
byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }) }, ++ Range { from: '\u{fe15}', to: '\u{fe15}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe16}', to: '\u{fe16}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe17}', to: '\u{fe17}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe18}', to: '\u{fe18}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe19}', to: '\u{fe1f}', mapping: Disallowed }, ++ Range { from: '\u{fe20}', to: '\u{fe2f}', mapping: Valid }, ++ Range { from: '\u{fe30}', to: '\u{fe30}', mapping: Disallowed }, ++ Range { from: '\u{fe31}', to: '\u{fe31}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe32}', to: '\u{fe32}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe33}', to: '\u{fe34}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe35}', to: '\u{fe35}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{fe36}', to: '\u{fe36}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{fe37}', to: '\u{fe37}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe38}', to: '\u{fe38}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe39}', to: '\u{fe39}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 37, byte_len: 3 }) }, ++ 
Range { from: '\u{fe3a}', to: '\u{fe3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe3b}', to: '\u{fe3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe3c}', to: '\u{fe3c}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe3d}', to: '\u{fe3d}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe3e}', to: '\u{fe3e}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe3f}', to: '\u{fe3f}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{fe40}', to: '\u{fe40}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{fe41}', to: '\u{fe41}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe42}', to: '\u{fe42}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe43}', to: '\u{fe43}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe44}', to: '\u{fe44}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe45}', to: '\u{fe46}', mapping: Valid }, ++ Range { from: '\u{fe47}', to: '\u{fe47}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe48}', to: '\u{fe48}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe49}', to: '\u{fe4c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 8, 
byte_len: 3 }) }, ++ Range { from: '\u{fe4d}', to: '\u{fe4f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe50}', to: '\u{fe50}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe51}', to: '\u{fe51}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe52}', to: '\u{fe53}', mapping: Disallowed }, ++ Range { from: '\u{fe54}', to: '\u{fe54}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }) }, ++ Range { from: '\u{fe55}', to: '\u{fe55}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe56}', to: '\u{fe56}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe57}', to: '\u{fe57}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe58}', to: '\u{fe58}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe59}', to: '\u{fe59}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{fe5a}', to: '\u{fe5a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{fe5b}', to: '\u{fe5b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe5c}', to: '\u{fe5c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe5d}', to: '\u{fe5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 37, byte_len: 3 }) 
}, ++ Range { from: '\u{fe5e}', to: '\u{fe5e}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe5f}', to: '\u{fe5f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe60}', to: '\u{fe60}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe61}', to: '\u{fe61}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe62}', to: '\u{fe62}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{fe63}', to: '\u{fe63}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe64}', to: '\u{fe64}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe65}', to: '\u{fe65}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe66}', to: '\u{fe66}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{fe67}', to: '\u{fe67}', mapping: Disallowed }, ++ Range { from: '\u{fe68}', to: '\u{fe68}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe69}', to: '\u{fe69}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe6a}', to: '\u{fe6a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{fe6b}', to: '\u{fe6b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 37, byte_len: 1 
}) }, ++ Range { from: '\u{fe6c}', to: '\u{fe6f}', mapping: Disallowed }, ++ Range { from: '\u{fe70}', to: '\u{fe70}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe71}', to: '\u{fe71}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 37, byte_len: 4 }) }, ++ Range { from: '\u{fe72}', to: '\u{fe72}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe73}', to: '\u{fe73}', mapping: Valid }, ++ Range { from: '\u{fe74}', to: '\u{fe74}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe75}', to: '\u{fe75}', mapping: Disallowed }, ++ Range { from: '\u{fe76}', to: '\u{fe76}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe77}', to: '\u{fe77}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 37, byte_len: 4 }) }, ++ Range { from: '\u{fe78}', to: '\u{fe78}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe79}', to: '\u{fe79}', mapping: Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 37, byte_len: 4 }) }, ++ Range { from: '\u{fe7a}', to: '\u{fe7a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe7b}', to: '\u{fe7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 37, byte_len: 4 }) }, ++ Range { from: '\u{fe7c}', to: '\u{fe7c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe7d}', to: '\u{fe7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 37, byte_len: 4 }) }, ++ Range { from: '\u{fe7e}', to: '\u{fe7e}', 
mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{fe7f}', to: '\u{fe7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 37, byte_len: 4 }) }, ++ Range { from: '\u{fe80}', to: '\u{fe80}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fe81}', to: '\u{fe82}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fe83}', to: '\u{fe84}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fe85}', to: '\u{fe86}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fe87}', to: '\u{fe88}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fe89}', to: '\u{fe8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fe8d}', to: '\u{fe8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fe8f}', to: '\u{fe92}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fe93}', to: '\u{fe94}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fe95}', to: '\u{fe98}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fe99}', to: '\u{fe9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fe9d}', to: '\u{fea0}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fea1}', to: '\u{fea4}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, 
byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{fea5}', to: '\u{fea8}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fea9}', to: '\u{feaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{feab}', to: '\u{feac}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fead}', to: '\u{feae}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{feaf}', to: '\u{feb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{feb1}', to: '\u{feb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{feb5}', to: '\u{feb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{feb9}', to: '\u{febc}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{febd}', to: '\u{fec0}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fec1}', to: '\u{fec4}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fec5}', to: '\u{fec8}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fec9}', to: '\u{fecc}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fecd}', to: '\u{fed0}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fed1}', to: '\u{fed4}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fed5}', to: '\u{fed8}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fed9}', to: '\u{fedc}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fedd}', to: '\u{fee0}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fee1}', to: '\u{fee4}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fee5}', to: '\u{fee8}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fee9}', to: '\u{feec}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{feed}', to: '\u{feee}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{feef}', to: '\u{fef0}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 32, byte_len: 2 }) }, ++ Range { from: '\u{fef1}', to: '\u{fef4}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{fef5}', to: '\u{fef6}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{fef7}', to: '\u{fef8}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{fef9}', to: '\u{fefa}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{fefb}', to: '\u{fefc}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{fefd}', to: '\u{fefe}', mapping: Disallowed }, ++ Range { from: '\u{feff}', to: '\u{feff}', mapping: Ignored }, ++ Range { from: '\u{ff00}', to: '\u{ff00}', mapping: Disallowed }, ++ Range { from: '\u{ff01}', to: '\u{ff01}', mapping: 
DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff02}', to: '\u{ff02}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 38, byte_len: 1 }) }, ++ Range { from: '\u{ff03}', to: '\u{ff03}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff04}', to: '\u{ff04}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff05}', to: '\u{ff05}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff06}', to: '\u{ff06}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff07}', to: '\u{ff07}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 38, byte_len: 1 }) }, ++ Range { from: '\u{ff08}', to: '\u{ff08}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff09}', to: '\u{ff09}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff0a}', to: '\u{ff0a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff0b}', to: '\u{ff0b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff0c}', to: '\u{ff0c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff0d}', to: '\u{ff0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff0e}', to: '\u{ff0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, 
byte_start_hi: 14, byte_len: 1 }) }, ++ Range { from: '\u{ff0f}', to: '\u{ff0f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 38, byte_len: 1 }) }, ++ Range { from: '\u{ff10}', to: '\u{ff10}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff11}', to: '\u{ff11}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff12}', to: '\u{ff12}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff13}', to: '\u{ff13}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff14}', to: '\u{ff14}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff15}', to: '\u{ff15}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff16}', to: '\u{ff16}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff17}', to: '\u{ff17}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff18}', to: '\u{ff18}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff19}', to: '\u{ff19}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff1a}', to: '\u{ff1a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff1b}', to: '\u{ff1b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 2, byte_len: 1 }) }, ++ Range { from: '\u{ff1c}', to: '\u{ff1c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 37, byte_len: 1 }) }, ++ 
Range { from: '\u{ff1d}', to: '\u{ff1d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 175, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff1e}', to: '\u{ff1e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff1f}', to: '\u{ff1f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff20}', to: '\u{ff20}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff21}', to: '\u{ff21}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff22}', to: '\u{ff22}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff23}', to: '\u{ff23}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff24}', to: '\u{ff24}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff25}', to: '\u{ff25}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff26}', to: '\u{ff26}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff27}', to: '\u{ff27}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff28}', to: '\u{ff28}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff29}', to: '\u{ff29}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff2a}', to: '\u{ff2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff2b}', to: '\u{ff2b}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff2c}', to: '\u{ff2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff2d}', to: '\u{ff2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff2e}', to: '\u{ff2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff2f}', to: '\u{ff2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff30}', to: '\u{ff30}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff31}', to: '\u{ff31}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff32}', to: '\u{ff32}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff33}', to: '\u{ff33}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff34}', to: '\u{ff34}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff35}', to: '\u{ff35}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff36}', to: '\u{ff36}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff37}', to: '\u{ff37}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff38}', to: '\u{ff38}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff39}', to: '\u{ff39}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff3a}', 
to: '\u{ff3a}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff3b}', to: '\u{ff3b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff3c}', to: '\u{ff3c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff3d}', to: '\u{ff3d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff3e}', to: '\u{ff3e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 38, byte_len: 1 }) }, ++ Range { from: '\u{ff3f}', to: '\u{ff3f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff40}', to: '\u{ff40}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{ff41}', to: '\u{ff41}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff42}', to: '\u{ff42}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff43}', to: '\u{ff43}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff44}', to: '\u{ff44}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff45}', to: '\u{ff45}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff46}', to: '\u{ff46}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff47}', to: '\u{ff47}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff48}', to: '\u{ff48}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff49}', to: '\u{ff49}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff4a}', to: '\u{ff4a}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff4b}', to: '\u{ff4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff4c}', to: '\u{ff4c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff4d}', to: '\u{ff4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff4e}', to: '\u{ff4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff4f}', to: '\u{ff4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff50}', to: '\u{ff50}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff51}', to: '\u{ff51}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff52}', to: '\u{ff52}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff53}', to: '\u{ff53}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff54}', to: '\u{ff54}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff55}', to: '\u{ff55}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff56}', to: '\u{ff56}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: 
'\u{ff57}', to: '\u{ff57}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff58}', to: '\u{ff58}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff59}', to: '\u{ff59}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff5a}', to: '\u{ff5a}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{ff5b}', to: '\u{ff5b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff5c}', to: '\u{ff5c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 38, byte_len: 1 }) }, ++ Range { from: '\u{ff5d}', to: '\u{ff5d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 37, byte_len: 1 }) }, ++ Range { from: '\u{ff5e}', to: '\u{ff5e}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 38, byte_len: 1 }) }, ++ Range { from: '\u{ff5f}', to: '\u{ff5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff60}', to: '\u{ff60}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff61}', to: '\u{ff61}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 14, byte_len: 1 }) }, ++ Range { from: '\u{ff62}', to: '\u{ff62}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{ff63}', to: '\u{ff63}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{ff64}', to: '\u{ff64}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 37, byte_len: 3 }) }, ++ Range { from: '\u{ff65}', to: '\u{ff65}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff66}', to: '\u{ff66}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff67}', to: '\u{ff67}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff68}', to: '\u{ff68}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff69}', to: '\u{ff69}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff6a}', to: '\u{ff6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff6b}', to: '\u{ff6b}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff6c}', to: '\u{ff6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff6d}', to: '\u{ff6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff6e}', to: '\u{ff6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff6f}', to: '\u{ff6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff70}', to: '\u{ff70}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff71}', to: '\u{ff71}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff72}', to: '\u{ff72}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff73}', to: '\u{ff73}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 17, byte_len: 3 }) }, ++ Range 
{ from: '\u{ff74}', to: '\u{ff74}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff75}', to: '\u{ff75}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff76}', to: '\u{ff76}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff77}', to: '\u{ff77}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff78}', to: '\u{ff78}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff79}', to: '\u{ff79}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff7a}', to: '\u{ff7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff7b}', to: '\u{ff7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff7c}', to: '\u{ff7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff7d}', to: '\u{ff7d}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff7e}', to: '\u{ff7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff7f}', to: '\u{ff7f}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff80}', to: '\u{ff80}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff81}', to: '\u{ff81}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff82}', to: '\u{ff82}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 254, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{ff83}', to: '\u{ff83}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff84}', to: '\u{ff84}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff85}', to: '\u{ff85}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff86}', to: '\u{ff86}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff87}', to: '\u{ff87}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff88}', to: '\u{ff88}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff89}', to: '\u{ff89}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff8a}', to: '\u{ff8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff8b}', to: '\u{ff8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff8c}', to: '\u{ff8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff8d}', to: '\u{ff8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff8e}', to: '\u{ff8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff8f}', to: '\u{ff8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff90}', to: '\u{ff90}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff91}', to: 
'\u{ff91}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff92}', to: '\u{ff92}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff93}', to: '\u{ff93}', mapping: Mapped(StringTableSlice { byte_start_lo: 49, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff94}', to: '\u{ff94}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff95}', to: '\u{ff95}', mapping: Mapped(StringTableSlice { byte_start_lo: 55, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff96}', to: '\u{ff96}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff97}', to: '\u{ff97}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff98}', to: '\u{ff98}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff99}', to: '\u{ff99}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff9a}', to: '\u{ff9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff9b}', to: '\u{ff9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff9c}', to: '\u{ff9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 18, byte_len: 3 }) }, ++ Range { from: '\u{ff9d}', to: '\u{ff9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff9e}', to: '\u{ff9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ff9f}', to: '\u{ff9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 38, 
byte_len: 3 }) }, ++ Range { from: '\u{ffa0}', to: '\u{ffa0}', mapping: Disallowed }, ++ Range { from: '\u{ffa1}', to: '\u{ffa1}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffa2}', to: '\u{ffa2}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffa3}', to: '\u{ffa3}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffa4}', to: '\u{ffa4}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffa5}', to: '\u{ffa5}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffa6}', to: '\u{ffa6}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffa7}', to: '\u{ffa7}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffa8}', to: '\u{ffa8}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffa9}', to: '\u{ffa9}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffaa}', to: '\u{ffaa}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffab}', to: '\u{ffab}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffac}', to: '\u{ffac}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffad}', to: '\u{ffad}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffae}', to: '\u{ffae}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 14, byte_len: 3 }) }, ++ Range 
{ from: '\u{ffaf}', to: '\u{ffaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffb0}', to: '\u{ffb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffb1}', to: '\u{ffb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffb2}', to: '\u{ffb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffb3}', to: '\u{ffb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffb4}', to: '\u{ffb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffb5}', to: '\u{ffb5}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffb6}', to: '\u{ffb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 98, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffb7}', to: '\u{ffb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 101, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffb8}', to: '\u{ffb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffb9}', to: '\u{ffb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 107, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffba}', to: '\u{ffba}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffbb}', to: '\u{ffbb}', mapping: Mapped(StringTableSlice { byte_start_lo: 113, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffbc}', to: '\u{ffbc}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffbd}', to: '\u{ffbd}', mapping: Mapped(StringTableSlice { byte_start_lo: 
119, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffbe}', to: '\u{ffbe}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffbf}', to: '\u{ffc1}', mapping: Disallowed }, ++ Range { from: '\u{ffc2}', to: '\u{ffc2}', mapping: Mapped(StringTableSlice { byte_start_lo: 125, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffc3}', to: '\u{ffc3}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffc4}', to: '\u{ffc4}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffc5}', to: '\u{ffc5}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffc6}', to: '\u{ffc6}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffc7}', to: '\u{ffc7}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffc8}', to: '\u{ffc9}', mapping: Disallowed }, ++ Range { from: '\u{ffca}', to: '\u{ffca}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffcb}', to: '\u{ffcb}', mapping: Mapped(StringTableSlice { byte_start_lo: 146, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffcc}', to: '\u{ffcc}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffcd}', to: '\u{ffcd}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffce}', to: '\u{ffce}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffcf}', to: '\u{ffcf}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffd0}', to: 
'\u{ffd1}', mapping: Disallowed }, ++ Range { from: '\u{ffd2}', to: '\u{ffd2}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffd3}', to: '\u{ffd3}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffd4}', to: '\u{ffd4}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffd5}', to: '\u{ffd5}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffd6}', to: '\u{ffd6}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffd7}', to: '\u{ffd7}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffd8}', to: '\u{ffd9}', mapping: Disallowed }, ++ Range { from: '\u{ffda}', to: '\u{ffda}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffdb}', to: '\u{ffdb}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffdc}', to: '\u{ffdc}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 14, byte_len: 3 }) }, ++ Range { from: '\u{ffdd}', to: '\u{ffdf}', mapping: Disallowed }, ++ Range { from: '\u{ffe0}', to: '\u{ffe0}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{ffe1}', to: '\u{ffe1}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{ffe2}', to: '\u{ffe2}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{ffe3}', to: '\u{ffe3}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 0, byte_len: 3 }) }, ++ Range { from: '\u{ffe4}', to: 
'\u{ffe4}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{ffe5}', to: '\u{ffe5}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{ffe6}', to: '\u{ffe6}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ffe7}', to: '\u{ffe7}', mapping: Disallowed }, ++ Range { from: '\u{ffe8}', to: '\u{ffe8}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ffe9}', to: '\u{ffe9}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ffea}', to: '\u{ffea}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ffeb}', to: '\u{ffeb}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ffec}', to: '\u{ffec}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ffed}', to: '\u{ffed}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ffee}', to: '\u{ffee}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 38, byte_len: 3 }) }, ++ Range { from: '\u{ffef}', to: '\u{ffff}', mapping: Disallowed }, ++ Range { from: '\u{10000}', to: '\u{1000b}', mapping: Valid }, ++ Range { from: '\u{1000c}', to: '\u{1000c}', mapping: Disallowed }, ++ Range { from: '\u{1000d}', to: '\u{10026}', mapping: Valid }, ++ Range { from: '\u{10027}', to: '\u{10027}', mapping: Disallowed }, ++ Range { from: '\u{10028}', to: '\u{1003a}', mapping: Valid }, ++ Range { from: '\u{1003b}', to: '\u{1003b}', mapping: Disallowed }, ++ Range { from: '\u{1003c}', to: '\u{1003d}', mapping: Valid }, ++ Range { from: '\u{1003e}', to: '\u{1003e}', mapping: 
Disallowed }, ++ Range { from: '\u{1003f}', to: '\u{1004d}', mapping: Valid }, ++ Range { from: '\u{1004e}', to: '\u{1004f}', mapping: Disallowed }, ++ Range { from: '\u{10050}', to: '\u{1005d}', mapping: Valid }, ++ Range { from: '\u{1005e}', to: '\u{1007f}', mapping: Disallowed }, ++ Range { from: '\u{10080}', to: '\u{100fa}', mapping: Valid }, ++ Range { from: '\u{100fb}', to: '\u{100ff}', mapping: Disallowed }, ++ Range { from: '\u{10100}', to: '\u{10102}', mapping: Valid }, ++ Range { from: '\u{10103}', to: '\u{10106}', mapping: Disallowed }, ++ Range { from: '\u{10107}', to: '\u{10133}', mapping: Valid }, ++ Range { from: '\u{10134}', to: '\u{10136}', mapping: Disallowed }, ++ Range { from: '\u{10137}', to: '\u{1018e}', mapping: Valid }, ++ Range { from: '\u{1018f}', to: '\u{1018f}', mapping: Disallowed }, ++ Range { from: '\u{10190}', to: '\u{1019b}', mapping: Valid }, ++ Range { from: '\u{1019c}', to: '\u{1019f}', mapping: Disallowed }, ++ Range { from: '\u{101a0}', to: '\u{101a0}', mapping: Valid }, ++ Range { from: '\u{101a1}', to: '\u{101cf}', mapping: Disallowed }, ++ Range { from: '\u{101d0}', to: '\u{101fd}', mapping: Valid }, ++ Range { from: '\u{101fe}', to: '\u{1027f}', mapping: Disallowed }, ++ Range { from: '\u{10280}', to: '\u{1029c}', mapping: Valid }, ++ Range { from: '\u{1029d}', to: '\u{1029f}', mapping: Disallowed }, ++ Range { from: '\u{102a0}', to: '\u{102d0}', mapping: Valid }, ++ Range { from: '\u{102d1}', to: '\u{102df}', mapping: Disallowed }, ++ Range { from: '\u{102e0}', to: '\u{102fb}', mapping: Valid }, ++ Range { from: '\u{102fc}', to: '\u{102ff}', mapping: Disallowed }, ++ Range { from: '\u{10300}', to: '\u{10323}', mapping: Valid }, ++ Range { from: '\u{10324}', to: '\u{1032c}', mapping: Disallowed }, ++ Range { from: '\u{1032d}', to: '\u{1034a}', mapping: Valid }, ++ Range { from: '\u{1034b}', to: '\u{1034f}', mapping: Disallowed }, ++ Range { from: '\u{10350}', to: '\u{1037a}', mapping: Valid }, ++ Range { from: '\u{1037b}', 
to: '\u{1037f}', mapping: Disallowed }, ++ Range { from: '\u{10380}', to: '\u{1039d}', mapping: Valid }, ++ Range { from: '\u{1039e}', to: '\u{1039e}', mapping: Disallowed }, ++ Range { from: '\u{1039f}', to: '\u{103c3}', mapping: Valid }, ++ Range { from: '\u{103c4}', to: '\u{103c7}', mapping: Disallowed }, ++ Range { from: '\u{103c8}', to: '\u{103d5}', mapping: Valid }, ++ Range { from: '\u{103d6}', to: '\u{103ff}', mapping: Disallowed }, ++ Range { from: '\u{10400}', to: '\u{10400}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10401}', to: '\u{10401}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10402}', to: '\u{10402}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10403}', to: '\u{10403}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10404}', to: '\u{10404}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10405}', to: '\u{10405}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10406}', to: '\u{10406}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10407}', to: '\u{10407}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10408}', to: '\u{10408}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10409}', to: '\u{10409}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{1040a}', to: '\u{1040a}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: 
'\u{1040b}', to: '\u{1040b}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{1040c}', to: '\u{1040c}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{1040d}', to: '\u{1040d}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{1040e}', to: '\u{1040e}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{1040f}', to: '\u{1040f}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10410}', to: '\u{10410}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10411}', to: '\u{10411}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10412}', to: '\u{10412}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10413}', to: '\u{10413}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10414}', to: '\u{10414}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10415}', to: '\u{10415}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10416}', to: '\u{10416}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10417}', to: '\u{10417}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10418}', to: '\u{10418}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{10419}', to: '\u{10419}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{1041a}', to: '\u{1041a}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 38, byte_len: 4 }) }, ++ Range { from: '\u{1041b}', to: '\u{1041b}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{1041c}', to: '\u{1041c}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{1041d}', to: '\u{1041d}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{1041e}', to: '\u{1041e}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{1041f}', to: '\u{1041f}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10420}', to: '\u{10420}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10421}', to: '\u{10421}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10422}', to: '\u{10422}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10423}', to: '\u{10423}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10424}', to: '\u{10424}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10425}', to: '\u{10425}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10426}', to: '\u{10426}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10427}', to: '\u{10427}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 39, 
byte_len: 4 }) }, ++ Range { from: '\u{10428}', to: '\u{1049d}', mapping: Valid }, ++ Range { from: '\u{1049e}', to: '\u{1049f}', mapping: Disallowed }, ++ Range { from: '\u{104a0}', to: '\u{104a9}', mapping: Valid }, ++ Range { from: '\u{104aa}', to: '\u{104af}', mapping: Disallowed }, ++ Range { from: '\u{104b0}', to: '\u{104b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104b1}', to: '\u{104b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104b2}', to: '\u{104b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104b3}', to: '\u{104b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104b4}', to: '\u{104b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104b5}', to: '\u{104b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104b6}', to: '\u{104b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104b7}', to: '\u{104b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104b8}', to: '\u{104b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104b9}', to: '\u{104b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104ba}', to: '\u{104ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104bb}', to: '\u{104bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104bc}', to: '\u{104bc}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104bd}', to: '\u{104bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104be}', to: '\u{104be}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104bf}', to: '\u{104bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104c0}', to: '\u{104c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104c1}', to: '\u{104c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104c2}', to: '\u{104c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104c3}', to: '\u{104c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104c4}', to: '\u{104c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104c5}', to: '\u{104c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104c6}', to: '\u{104c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104c7}', to: '\u{104c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104c8}', to: '\u{104c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104c9}', to: '\u{104c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104ca}', to: '\u{104ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, 
byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104cb}', to: '\u{104cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104cc}', to: '\u{104cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104cd}', to: '\u{104cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104ce}', to: '\u{104ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104cf}', to: '\u{104cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104d0}', to: '\u{104d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104d1}', to: '\u{104d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104d2}', to: '\u{104d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104d3}', to: '\u{104d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{104d4}', to: '\u{104d7}', mapping: Disallowed }, ++ Range { from: '\u{104d8}', to: '\u{104fb}', mapping: Valid }, ++ Range { from: '\u{104fc}', to: '\u{104ff}', mapping: Disallowed }, ++ Range { from: '\u{10500}', to: '\u{10527}', mapping: Valid }, ++ Range { from: '\u{10528}', to: '\u{1052f}', mapping: Disallowed }, ++ Range { from: '\u{10530}', to: '\u{10563}', mapping: Valid }, ++ Range { from: '\u{10564}', to: '\u{1056e}', mapping: Disallowed }, ++ Range { from: '\u{1056f}', to: '\u{1056f}', mapping: Valid }, ++ Range { from: '\u{10570}', to: '\u{105ff}', mapping: Disallowed }, ++ Range { from: '\u{10600}', to: '\u{10736}', mapping: Valid }, ++ Range { from: '\u{10737}', to: 
'\u{1073f}', mapping: Disallowed }, ++ Range { from: '\u{10740}', to: '\u{10755}', mapping: Valid }, ++ Range { from: '\u{10756}', to: '\u{1075f}', mapping: Disallowed }, ++ Range { from: '\u{10760}', to: '\u{10767}', mapping: Valid }, ++ Range { from: '\u{10768}', to: '\u{107ff}', mapping: Disallowed }, ++ Range { from: '\u{10800}', to: '\u{10805}', mapping: Valid }, ++ Range { from: '\u{10806}', to: '\u{10807}', mapping: Disallowed }, ++ Range { from: '\u{10808}', to: '\u{10808}', mapping: Valid }, ++ Range { from: '\u{10809}', to: '\u{10809}', mapping: Disallowed }, ++ Range { from: '\u{1080a}', to: '\u{10835}', mapping: Valid }, ++ Range { from: '\u{10836}', to: '\u{10836}', mapping: Disallowed }, ++ Range { from: '\u{10837}', to: '\u{10838}', mapping: Valid }, ++ Range { from: '\u{10839}', to: '\u{1083b}', mapping: Disallowed }, ++ Range { from: '\u{1083c}', to: '\u{1083c}', mapping: Valid }, ++ Range { from: '\u{1083d}', to: '\u{1083e}', mapping: Disallowed }, ++ Range { from: '\u{1083f}', to: '\u{10855}', mapping: Valid }, ++ Range { from: '\u{10856}', to: '\u{10856}', mapping: Disallowed }, ++ Range { from: '\u{10857}', to: '\u{1089e}', mapping: Valid }, ++ Range { from: '\u{1089f}', to: '\u{108a6}', mapping: Disallowed }, ++ Range { from: '\u{108a7}', to: '\u{108af}', mapping: Valid }, ++ Range { from: '\u{108b0}', to: '\u{108df}', mapping: Disallowed }, ++ Range { from: '\u{108e0}', to: '\u{108f2}', mapping: Valid }, ++ Range { from: '\u{108f3}', to: '\u{108f3}', mapping: Disallowed }, ++ Range { from: '\u{108f4}', to: '\u{108f5}', mapping: Valid }, ++ Range { from: '\u{108f6}', to: '\u{108fa}', mapping: Disallowed }, ++ Range { from: '\u{108fb}', to: '\u{1091b}', mapping: Valid }, ++ Range { from: '\u{1091c}', to: '\u{1091e}', mapping: Disallowed }, ++ Range { from: '\u{1091f}', to: '\u{10939}', mapping: Valid }, ++ Range { from: '\u{1093a}', to: '\u{1093e}', mapping: Disallowed }, ++ Range { from: '\u{1093f}', to: '\u{1093f}', mapping: Valid }, ++ Range 
{ from: '\u{10940}', to: '\u{1097f}', mapping: Disallowed }, ++ Range { from: '\u{10980}', to: '\u{109b7}', mapping: Valid }, ++ Range { from: '\u{109b8}', to: '\u{109bb}', mapping: Disallowed }, ++ Range { from: '\u{109bc}', to: '\u{109cf}', mapping: Valid }, ++ Range { from: '\u{109d0}', to: '\u{109d1}', mapping: Disallowed }, ++ Range { from: '\u{109d2}', to: '\u{10a03}', mapping: Valid }, ++ Range { from: '\u{10a04}', to: '\u{10a04}', mapping: Disallowed }, ++ Range { from: '\u{10a05}', to: '\u{10a06}', mapping: Valid }, ++ Range { from: '\u{10a07}', to: '\u{10a0b}', mapping: Disallowed }, ++ Range { from: '\u{10a0c}', to: '\u{10a13}', mapping: Valid }, ++ Range { from: '\u{10a14}', to: '\u{10a14}', mapping: Disallowed }, ++ Range { from: '\u{10a15}', to: '\u{10a17}', mapping: Valid }, ++ Range { from: '\u{10a18}', to: '\u{10a18}', mapping: Disallowed }, ++ Range { from: '\u{10a19}', to: '\u{10a33}', mapping: Valid }, ++ Range { from: '\u{10a34}', to: '\u{10a37}', mapping: Disallowed }, ++ Range { from: '\u{10a38}', to: '\u{10a3a}', mapping: Valid }, ++ Range { from: '\u{10a3b}', to: '\u{10a3e}', mapping: Disallowed }, ++ Range { from: '\u{10a3f}', to: '\u{10a47}', mapping: Valid }, ++ Range { from: '\u{10a48}', to: '\u{10a4f}', mapping: Disallowed }, ++ Range { from: '\u{10a50}', to: '\u{10a58}', mapping: Valid }, ++ Range { from: '\u{10a59}', to: '\u{10a5f}', mapping: Disallowed }, ++ Range { from: '\u{10a60}', to: '\u{10a9f}', mapping: Valid }, ++ Range { from: '\u{10aa0}', to: '\u{10abf}', mapping: Disallowed }, ++ Range { from: '\u{10ac0}', to: '\u{10ae6}', mapping: Valid }, ++ Range { from: '\u{10ae7}', to: '\u{10aea}', mapping: Disallowed }, ++ Range { from: '\u{10aeb}', to: '\u{10af6}', mapping: Valid }, ++ Range { from: '\u{10af7}', to: '\u{10aff}', mapping: Disallowed }, ++ Range { from: '\u{10b00}', to: '\u{10b35}', mapping: Valid }, ++ Range { from: '\u{10b36}', to: '\u{10b38}', mapping: Disallowed }, ++ Range { from: '\u{10b39}', to: '\u{10b55}', 
mapping: Valid }, ++ Range { from: '\u{10b56}', to: '\u{10b57}', mapping: Disallowed }, ++ Range { from: '\u{10b58}', to: '\u{10b72}', mapping: Valid }, ++ Range { from: '\u{10b73}', to: '\u{10b77}', mapping: Disallowed }, ++ Range { from: '\u{10b78}', to: '\u{10b91}', mapping: Valid }, ++ Range { from: '\u{10b92}', to: '\u{10b98}', mapping: Disallowed }, ++ Range { from: '\u{10b99}', to: '\u{10b9c}', mapping: Valid }, ++ Range { from: '\u{10b9d}', to: '\u{10ba8}', mapping: Disallowed }, ++ Range { from: '\u{10ba9}', to: '\u{10baf}', mapping: Valid }, ++ Range { from: '\u{10bb0}', to: '\u{10bff}', mapping: Disallowed }, ++ Range { from: '\u{10c00}', to: '\u{10c48}', mapping: Valid }, ++ Range { from: '\u{10c49}', to: '\u{10c7f}', mapping: Disallowed }, ++ Range { from: '\u{10c80}', to: '\u{10c80}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c81}', to: '\u{10c81}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c82}', to: '\u{10c82}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c83}', to: '\u{10c83}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c84}', to: '\u{10c84}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c85}', to: '\u{10c85}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c86}', to: '\u{10c86}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c87}', to: '\u{10c87}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c88}', to: '\u{10c88}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 39, 
byte_len: 4 }) }, ++ Range { from: '\u{10c89}', to: '\u{10c89}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c8a}', to: '\u{10c8a}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c8b}', to: '\u{10c8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c8c}', to: '\u{10c8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c8d}', to: '\u{10c8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c8e}', to: '\u{10c8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 39, byte_len: 4 }) }, ++ Range { from: '\u{10c8f}', to: '\u{10c8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c90}', to: '\u{10c90}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c91}', to: '\u{10c91}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c92}', to: '\u{10c92}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c93}', to: '\u{10c93}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c94}', to: '\u{10c94}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c95}', to: '\u{10c95}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c96}', to: '\u{10c96}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c97}', to: '\u{10c97}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c98}', to: '\u{10c98}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c99}', to: '\u{10c99}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c9a}', to: '\u{10c9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c9b}', to: '\u{10c9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c9c}', to: '\u{10c9c}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c9d}', to: '\u{10c9d}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c9e}', to: '\u{10c9e}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10c9f}', to: '\u{10c9f}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10ca0}', to: '\u{10ca0}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10ca1}', to: '\u{10ca1}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10ca2}', to: '\u{10ca2}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10ca3}', to: '\u{10ca3}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10ca4}', to: '\u{10ca4}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10ca5}', to: '\u{10ca5}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, 
byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10ca6}', to: '\u{10ca6}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10ca7}', to: '\u{10ca7}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10ca8}', to: '\u{10ca8}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10ca9}', to: '\u{10ca9}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10caa}', to: '\u{10caa}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10cab}', to: '\u{10cab}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10cac}', to: '\u{10cac}', mapping: Mapped(StringTableSlice { byte_start_lo: 116, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10cad}', to: '\u{10cad}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10cae}', to: '\u{10cae}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10caf}', to: '\u{10caf}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10cb0}', to: '\u{10cb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10cb1}', to: '\u{10cb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10cb2}', to: '\u{10cb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{10cb3}', to: '\u{10cbf}', mapping: Disallowed }, ++ Range { from: '\u{10cc0}', to: '\u{10cf2}', mapping: Valid }, ++ Range { from: 
'\u{10cf3}', to: '\u{10cf9}', mapping: Disallowed }, ++ Range { from: '\u{10cfa}', to: '\u{10cff}', mapping: Valid }, ++ Range { from: '\u{10d00}', to: '\u{10e5f}', mapping: Disallowed }, ++ Range { from: '\u{10e60}', to: '\u{10e7e}', mapping: Valid }, ++ Range { from: '\u{10e7f}', to: '\u{10fff}', mapping: Disallowed }, ++ Range { from: '\u{11000}', to: '\u{1104d}', mapping: Valid }, ++ Range { from: '\u{1104e}', to: '\u{11051}', mapping: Disallowed }, ++ Range { from: '\u{11052}', to: '\u{1106f}', mapping: Valid }, ++ Range { from: '\u{11070}', to: '\u{1107e}', mapping: Disallowed }, ++ Range { from: '\u{1107f}', to: '\u{110bc}', mapping: Valid }, ++ Range { from: '\u{110bd}', to: '\u{110bd}', mapping: Disallowed }, ++ Range { from: '\u{110be}', to: '\u{110c1}', mapping: Valid }, ++ Range { from: '\u{110c2}', to: '\u{110cf}', mapping: Disallowed }, ++ Range { from: '\u{110d0}', to: '\u{110e8}', mapping: Valid }, ++ Range { from: '\u{110e9}', to: '\u{110ef}', mapping: Disallowed }, ++ Range { from: '\u{110f0}', to: '\u{110f9}', mapping: Valid }, ++ Range { from: '\u{110fa}', to: '\u{110ff}', mapping: Disallowed }, ++ Range { from: '\u{11100}', to: '\u{11134}', mapping: Valid }, ++ Range { from: '\u{11135}', to: '\u{11135}', mapping: Disallowed }, ++ Range { from: '\u{11136}', to: '\u{11143}', mapping: Valid }, ++ Range { from: '\u{11144}', to: '\u{1114f}', mapping: Disallowed }, ++ Range { from: '\u{11150}', to: '\u{11176}', mapping: Valid }, ++ Range { from: '\u{11177}', to: '\u{1117f}', mapping: Disallowed }, ++ Range { from: '\u{11180}', to: '\u{111cd}', mapping: Valid }, ++ Range { from: '\u{111ce}', to: '\u{111cf}', mapping: Disallowed }, ++ Range { from: '\u{111d0}', to: '\u{111df}', mapping: Valid }, ++ Range { from: '\u{111e0}', to: '\u{111e0}', mapping: Disallowed }, ++ Range { from: '\u{111e1}', to: '\u{111f4}', mapping: Valid }, ++ Range { from: '\u{111f5}', to: '\u{111ff}', mapping: Disallowed }, ++ Range { from: '\u{11200}', to: '\u{11211}', mapping: 
Valid }, ++ Range { from: '\u{11212}', to: '\u{11212}', mapping: Disallowed }, ++ Range { from: '\u{11213}', to: '\u{1123e}', mapping: Valid }, ++ Range { from: '\u{1123f}', to: '\u{1127f}', mapping: Disallowed }, ++ Range { from: '\u{11280}', to: '\u{11286}', mapping: Valid }, ++ Range { from: '\u{11287}', to: '\u{11287}', mapping: Disallowed }, ++ Range { from: '\u{11288}', to: '\u{11288}', mapping: Valid }, ++ Range { from: '\u{11289}', to: '\u{11289}', mapping: Disallowed }, ++ Range { from: '\u{1128a}', to: '\u{1128d}', mapping: Valid }, ++ Range { from: '\u{1128e}', to: '\u{1128e}', mapping: Disallowed }, ++ Range { from: '\u{1128f}', to: '\u{1129d}', mapping: Valid }, ++ Range { from: '\u{1129e}', to: '\u{1129e}', mapping: Disallowed }, ++ Range { from: '\u{1129f}', to: '\u{112a9}', mapping: Valid }, ++ Range { from: '\u{112aa}', to: '\u{112af}', mapping: Disallowed }, ++ Range { from: '\u{112b0}', to: '\u{112ea}', mapping: Valid }, ++ Range { from: '\u{112eb}', to: '\u{112ef}', mapping: Disallowed }, ++ Range { from: '\u{112f0}', to: '\u{112f9}', mapping: Valid }, ++ Range { from: '\u{112fa}', to: '\u{112ff}', mapping: Disallowed }, ++ Range { from: '\u{11300}', to: '\u{11303}', mapping: Valid }, ++ Range { from: '\u{11304}', to: '\u{11304}', mapping: Disallowed }, ++ Range { from: '\u{11305}', to: '\u{1130c}', mapping: Valid }, ++ Range { from: '\u{1130d}', to: '\u{1130e}', mapping: Disallowed }, ++ Range { from: '\u{1130f}', to: '\u{11310}', mapping: Valid }, ++ Range { from: '\u{11311}', to: '\u{11312}', mapping: Disallowed }, ++ Range { from: '\u{11313}', to: '\u{11328}', mapping: Valid }, ++ Range { from: '\u{11329}', to: '\u{11329}', mapping: Disallowed }, ++ Range { from: '\u{1132a}', to: '\u{11330}', mapping: Valid }, ++ Range { from: '\u{11331}', to: '\u{11331}', mapping: Disallowed }, ++ Range { from: '\u{11332}', to: '\u{11333}', mapping: Valid }, ++ Range { from: '\u{11334}', to: '\u{11334}', mapping: Disallowed }, ++ Range { from: '\u{11335}', 
to: '\u{11339}', mapping: Valid }, ++ Range { from: '\u{1133a}', to: '\u{1133b}', mapping: Disallowed }, ++ Range { from: '\u{1133c}', to: '\u{11344}', mapping: Valid }, ++ Range { from: '\u{11345}', to: '\u{11346}', mapping: Disallowed }, ++ Range { from: '\u{11347}', to: '\u{11348}', mapping: Valid }, ++ Range { from: '\u{11349}', to: '\u{1134a}', mapping: Disallowed }, ++ Range { from: '\u{1134b}', to: '\u{1134d}', mapping: Valid }, ++ Range { from: '\u{1134e}', to: '\u{1134f}', mapping: Disallowed }, ++ Range { from: '\u{11350}', to: '\u{11350}', mapping: Valid }, ++ Range { from: '\u{11351}', to: '\u{11356}', mapping: Disallowed }, ++ Range { from: '\u{11357}', to: '\u{11357}', mapping: Valid }, ++ Range { from: '\u{11358}', to: '\u{1135c}', mapping: Disallowed }, ++ Range { from: '\u{1135d}', to: '\u{11363}', mapping: Valid }, ++ Range { from: '\u{11364}', to: '\u{11365}', mapping: Disallowed }, ++ Range { from: '\u{11366}', to: '\u{1136c}', mapping: Valid }, ++ Range { from: '\u{1136d}', to: '\u{1136f}', mapping: Disallowed }, ++ Range { from: '\u{11370}', to: '\u{11374}', mapping: Valid }, ++ Range { from: '\u{11375}', to: '\u{113ff}', mapping: Disallowed }, ++ Range { from: '\u{11400}', to: '\u{11459}', mapping: Valid }, ++ Range { from: '\u{1145a}', to: '\u{1145a}', mapping: Disallowed }, ++ Range { from: '\u{1145b}', to: '\u{1145b}', mapping: Valid }, ++ Range { from: '\u{1145c}', to: '\u{1145c}', mapping: Disallowed }, ++ Range { from: '\u{1145d}', to: '\u{1145d}', mapping: Valid }, ++ Range { from: '\u{1145e}', to: '\u{1147f}', mapping: Disallowed }, ++ Range { from: '\u{11480}', to: '\u{114c7}', mapping: Valid }, ++ Range { from: '\u{114c8}', to: '\u{114cf}', mapping: Disallowed }, ++ Range { from: '\u{114d0}', to: '\u{114d9}', mapping: Valid }, ++ Range { from: '\u{114da}', to: '\u{1157f}', mapping: Disallowed }, ++ Range { from: '\u{11580}', to: '\u{115b5}', mapping: Valid }, ++ Range { from: '\u{115b6}', to: '\u{115b7}', mapping: Disallowed }, ++ 
Range { from: '\u{115b8}', to: '\u{115dd}', mapping: Valid }, ++ Range { from: '\u{115de}', to: '\u{115ff}', mapping: Disallowed }, ++ Range { from: '\u{11600}', to: '\u{11644}', mapping: Valid }, ++ Range { from: '\u{11645}', to: '\u{1164f}', mapping: Disallowed }, ++ Range { from: '\u{11650}', to: '\u{11659}', mapping: Valid }, ++ Range { from: '\u{1165a}', to: '\u{1165f}', mapping: Disallowed }, ++ Range { from: '\u{11660}', to: '\u{1166c}', mapping: Valid }, ++ Range { from: '\u{1166d}', to: '\u{1167f}', mapping: Disallowed }, ++ Range { from: '\u{11680}', to: '\u{116b7}', mapping: Valid }, ++ Range { from: '\u{116b8}', to: '\u{116bf}', mapping: Disallowed }, ++ Range { from: '\u{116c0}', to: '\u{116c9}', mapping: Valid }, ++ Range { from: '\u{116ca}', to: '\u{116ff}', mapping: Disallowed }, ++ Range { from: '\u{11700}', to: '\u{11719}', mapping: Valid }, ++ Range { from: '\u{1171a}', to: '\u{1171c}', mapping: Disallowed }, ++ Range { from: '\u{1171d}', to: '\u{1172b}', mapping: Valid }, ++ Range { from: '\u{1172c}', to: '\u{1172f}', mapping: Disallowed }, ++ Range { from: '\u{11730}', to: '\u{1173f}', mapping: Valid }, ++ Range { from: '\u{11740}', to: '\u{1189f}', mapping: Disallowed }, ++ Range { from: '\u{118a0}', to: '\u{118a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118a1}', to: '\u{118a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118a2}', to: '\u{118a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118a3}', to: '\u{118a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118a4}', to: '\u{118a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118a5}', to: '\u{118a5}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 164, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118a6}', to: '\u{118a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118a7}', to: '\u{118a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118a8}', to: '\u{118a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118a9}', to: '\u{118a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118aa}', to: '\u{118aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118ab}', to: '\u{118ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118ac}', to: '\u{118ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118ad}', to: '\u{118ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118ae}', to: '\u{118ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118af}', to: '\u{118af}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118b0}', to: '\u{118b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118b1}', to: '\u{118b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118b2}', to: '\u{118b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118b3}', to: '\u{118b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 40, byte_len: 4 }) 
}, ++ Range { from: '\u{118b4}', to: '\u{118b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118b5}', to: '\u{118b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118b6}', to: '\u{118b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118b7}', to: '\u{118b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 236, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118b8}', to: '\u{118b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118b9}', to: '\u{118b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118ba}', to: '\u{118ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118bb}', to: '\u{118bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 40, byte_len: 4 }) }, ++ Range { from: '\u{118bc}', to: '\u{118bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{118bd}', to: '\u{118bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{118be}', to: '\u{118be}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{118bf}', to: '\u{118bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{118c0}', to: '\u{118f2}', mapping: Valid }, ++ Range { from: '\u{118f3}', to: '\u{118fe}', mapping: Disallowed }, ++ Range { from: '\u{118ff}', to: '\u{118ff}', mapping: Valid }, ++ Range { from: '\u{11900}', to: '\u{119ff}', mapping: Disallowed }, ++ Range { from: '\u{11a00}', to: '\u{11a47}', mapping: Valid }, 
++ Range { from: '\u{11a48}', to: '\u{11a4f}', mapping: Disallowed }, ++ Range { from: '\u{11a50}', to: '\u{11a83}', mapping: Valid }, ++ Range { from: '\u{11a84}', to: '\u{11a85}', mapping: Disallowed }, ++ Range { from: '\u{11a86}', to: '\u{11a9c}', mapping: Valid }, ++ Range { from: '\u{11a9d}', to: '\u{11a9d}', mapping: Disallowed }, ++ Range { from: '\u{11a9e}', to: '\u{11aa2}', mapping: Valid }, ++ Range { from: '\u{11aa3}', to: '\u{11abf}', mapping: Disallowed }, ++ Range { from: '\u{11ac0}', to: '\u{11af8}', mapping: Valid }, ++ Range { from: '\u{11af9}', to: '\u{11bff}', mapping: Disallowed }, ++ Range { from: '\u{11c00}', to: '\u{11c08}', mapping: Valid }, ++ Range { from: '\u{11c09}', to: '\u{11c09}', mapping: Disallowed }, ++ Range { from: '\u{11c0a}', to: '\u{11c36}', mapping: Valid }, ++ Range { from: '\u{11c37}', to: '\u{11c37}', mapping: Disallowed }, ++ Range { from: '\u{11c38}', to: '\u{11c45}', mapping: Valid }, ++ Range { from: '\u{11c46}', to: '\u{11c4f}', mapping: Disallowed }, ++ Range { from: '\u{11c50}', to: '\u{11c6c}', mapping: Valid }, ++ Range { from: '\u{11c6d}', to: '\u{11c6f}', mapping: Disallowed }, ++ Range { from: '\u{11c70}', to: '\u{11c8f}', mapping: Valid }, ++ Range { from: '\u{11c90}', to: '\u{11c91}', mapping: Disallowed }, ++ Range { from: '\u{11c92}', to: '\u{11ca7}', mapping: Valid }, ++ Range { from: '\u{11ca8}', to: '\u{11ca8}', mapping: Disallowed }, ++ Range { from: '\u{11ca9}', to: '\u{11cb6}', mapping: Valid }, ++ Range { from: '\u{11cb7}', to: '\u{11cff}', mapping: Disallowed }, ++ Range { from: '\u{11d00}', to: '\u{11d06}', mapping: Valid }, ++ Range { from: '\u{11d07}', to: '\u{11d07}', mapping: Disallowed }, ++ Range { from: '\u{11d08}', to: '\u{11d09}', mapping: Valid }, ++ Range { from: '\u{11d0a}', to: '\u{11d0a}', mapping: Disallowed }, ++ Range { from: '\u{11d0b}', to: '\u{11d36}', mapping: Valid }, ++ Range { from: '\u{11d37}', to: '\u{11d39}', mapping: Disallowed }, ++ Range { from: '\u{11d3a}', to: 
'\u{11d3a}', mapping: Valid }, ++ Range { from: '\u{11d3b}', to: '\u{11d3b}', mapping: Disallowed }, ++ Range { from: '\u{11d3c}', to: '\u{11d3d}', mapping: Valid }, ++ Range { from: '\u{11d3e}', to: '\u{11d3e}', mapping: Disallowed }, ++ Range { from: '\u{11d3f}', to: '\u{11d47}', mapping: Valid }, ++ Range { from: '\u{11d48}', to: '\u{11d4f}', mapping: Disallowed }, ++ Range { from: '\u{11d50}', to: '\u{11d59}', mapping: Valid }, ++ Range { from: '\u{11d5a}', to: '\u{11fff}', mapping: Disallowed }, ++ Range { from: '\u{12000}', to: '\u{12399}', mapping: Valid }, ++ Range { from: '\u{1239a}', to: '\u{123ff}', mapping: Disallowed }, ++ Range { from: '\u{12400}', to: '\u{1246e}', mapping: Valid }, ++ Range { from: '\u{1246f}', to: '\u{1246f}', mapping: Disallowed }, ++ Range { from: '\u{12470}', to: '\u{12474}', mapping: Valid }, ++ Range { from: '\u{12475}', to: '\u{1247f}', mapping: Disallowed }, ++ Range { from: '\u{12480}', to: '\u{12543}', mapping: Valid }, ++ Range { from: '\u{12544}', to: '\u{12fff}', mapping: Disallowed }, ++ Range { from: '\u{13000}', to: '\u{1342e}', mapping: Valid }, ++ Range { from: '\u{1342f}', to: '\u{143ff}', mapping: Disallowed }, ++ Range { from: '\u{14400}', to: '\u{14646}', mapping: Valid }, ++ Range { from: '\u{14647}', to: '\u{167ff}', mapping: Disallowed }, ++ Range { from: '\u{16800}', to: '\u{16a38}', mapping: Valid }, ++ Range { from: '\u{16a39}', to: '\u{16a3f}', mapping: Disallowed }, ++ Range { from: '\u{16a40}', to: '\u{16a5e}', mapping: Valid }, ++ Range { from: '\u{16a5f}', to: '\u{16a5f}', mapping: Disallowed }, ++ Range { from: '\u{16a60}', to: '\u{16a69}', mapping: Valid }, ++ Range { from: '\u{16a6a}', to: '\u{16a6d}', mapping: Disallowed }, ++ Range { from: '\u{16a6e}', to: '\u{16a6f}', mapping: Valid }, ++ Range { from: '\u{16a70}', to: '\u{16acf}', mapping: Disallowed }, ++ Range { from: '\u{16ad0}', to: '\u{16aed}', mapping: Valid }, ++ Range { from: '\u{16aee}', to: '\u{16aef}', mapping: Disallowed }, ++ Range 
{ from: '\u{16af0}', to: '\u{16af5}', mapping: Valid }, ++ Range { from: '\u{16af6}', to: '\u{16aff}', mapping: Disallowed }, ++ Range { from: '\u{16b00}', to: '\u{16b45}', mapping: Valid }, ++ Range { from: '\u{16b46}', to: '\u{16b4f}', mapping: Disallowed }, ++ Range { from: '\u{16b50}', to: '\u{16b59}', mapping: Valid }, ++ Range { from: '\u{16b5a}', to: '\u{16b5a}', mapping: Disallowed }, ++ Range { from: '\u{16b5b}', to: '\u{16b61}', mapping: Valid }, ++ Range { from: '\u{16b62}', to: '\u{16b62}', mapping: Disallowed }, ++ Range { from: '\u{16b63}', to: '\u{16b77}', mapping: Valid }, ++ Range { from: '\u{16b78}', to: '\u{16b7c}', mapping: Disallowed }, ++ Range { from: '\u{16b7d}', to: '\u{16b8f}', mapping: Valid }, ++ Range { from: '\u{16b90}', to: '\u{16eff}', mapping: Disallowed }, ++ Range { from: '\u{16f00}', to: '\u{16f44}', mapping: Valid }, ++ Range { from: '\u{16f45}', to: '\u{16f4f}', mapping: Disallowed }, ++ Range { from: '\u{16f50}', to: '\u{16f7e}', mapping: Valid }, ++ Range { from: '\u{16f7f}', to: '\u{16f8e}', mapping: Disallowed }, ++ Range { from: '\u{16f8f}', to: '\u{16f9f}', mapping: Valid }, ++ Range { from: '\u{16fa0}', to: '\u{16fdf}', mapping: Disallowed }, ++ Range { from: '\u{16fe0}', to: '\u{16fe1}', mapping: Valid }, ++ Range { from: '\u{16fe2}', to: '\u{16fff}', mapping: Disallowed }, ++ Range { from: '\u{17000}', to: '\u{187ec}', mapping: Valid }, ++ Range { from: '\u{187ed}', to: '\u{187ff}', mapping: Disallowed }, ++ Range { from: '\u{18800}', to: '\u{18af2}', mapping: Valid }, ++ Range { from: '\u{18af3}', to: '\u{1afff}', mapping: Disallowed }, ++ Range { from: '\u{1b000}', to: '\u{1b11e}', mapping: Valid }, ++ Range { from: '\u{1b11f}', to: '\u{1b16f}', mapping: Disallowed }, ++ Range { from: '\u{1b170}', to: '\u{1b2fb}', mapping: Valid }, ++ Range { from: '\u{1b2fc}', to: '\u{1bbff}', mapping: Disallowed }, ++ Range { from: '\u{1bc00}', to: '\u{1bc6a}', mapping: Valid }, ++ Range { from: '\u{1bc6b}', to: '\u{1bc6f}', 
mapping: Disallowed }, ++ Range { from: '\u{1bc70}', to: '\u{1bc7c}', mapping: Valid }, ++ Range { from: '\u{1bc7d}', to: '\u{1bc7f}', mapping: Disallowed }, ++ Range { from: '\u{1bc80}', to: '\u{1bc88}', mapping: Valid }, ++ Range { from: '\u{1bc89}', to: '\u{1bc8f}', mapping: Disallowed }, ++ Range { from: '\u{1bc90}', to: '\u{1bc99}', mapping: Valid }, ++ Range { from: '\u{1bc9a}', to: '\u{1bc9b}', mapping: Disallowed }, ++ Range { from: '\u{1bc9c}', to: '\u{1bc9f}', mapping: Valid }, ++ Range { from: '\u{1bca0}', to: '\u{1bca3}', mapping: Ignored }, ++ Range { from: '\u{1bca4}', to: '\u{1cfff}', mapping: Disallowed }, ++ Range { from: '\u{1d000}', to: '\u{1d0f5}', mapping: Valid }, ++ Range { from: '\u{1d0f6}', to: '\u{1d0ff}', mapping: Disallowed }, ++ Range { from: '\u{1d100}', to: '\u{1d126}', mapping: Valid }, ++ Range { from: '\u{1d127}', to: '\u{1d128}', mapping: Disallowed }, ++ Range { from: '\u{1d129}', to: '\u{1d15d}', mapping: Valid }, ++ Range { from: '\u{1d15e}', to: '\u{1d15e}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 41, byte_len: 8 }) }, ++ Range { from: '\u{1d15f}', to: '\u{1d15f}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 41, byte_len: 8 }) }, ++ Range { from: '\u{1d160}', to: '\u{1d160}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 41, byte_len: 12 }) }, ++ Range { from: '\u{1d161}', to: '\u{1d161}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 41, byte_len: 12 }) }, ++ Range { from: '\u{1d162}', to: '\u{1d162}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 41, byte_len: 12 }) }, ++ Range { from: '\u{1d163}', to: '\u{1d163}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 41, byte_len: 12 }) }, ++ Range { from: '\u{1d164}', to: '\u{1d164}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 41, byte_len: 12 }) }, ++ Range { from: '\u{1d165}', to: '\u{1d172}', mapping: Valid 
}, ++ Range { from: '\u{1d173}', to: '\u{1d17a}', mapping: Disallowed }, ++ Range { from: '\u{1d17b}', to: '\u{1d1ba}', mapping: Valid }, ++ Range { from: '\u{1d1bb}', to: '\u{1d1bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 41, byte_len: 8 }) }, ++ Range { from: '\u{1d1bc}', to: '\u{1d1bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 41, byte_len: 8 }) }, ++ Range { from: '\u{1d1bd}', to: '\u{1d1bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 41, byte_len: 12 }) }, ++ Range { from: '\u{1d1be}', to: '\u{1d1be}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 41, byte_len: 12 }) }, ++ Range { from: '\u{1d1bf}', to: '\u{1d1bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 41, byte_len: 12 }) }, ++ Range { from: '\u{1d1c0}', to: '\u{1d1c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 41, byte_len: 12 }) }, ++ Range { from: '\u{1d1c1}', to: '\u{1d1e8}', mapping: Valid }, ++ Range { from: '\u{1d1e9}', to: '\u{1d1ff}', mapping: Disallowed }, ++ Range { from: '\u{1d200}', to: '\u{1d245}', mapping: Valid }, ++ Range { from: '\u{1d246}', to: '\u{1d2ff}', mapping: Disallowed }, ++ Range { from: '\u{1d300}', to: '\u{1d356}', mapping: Valid }, ++ Range { from: '\u{1d357}', to: '\u{1d35f}', mapping: Disallowed }, ++ Range { from: '\u{1d360}', to: '\u{1d371}', mapping: Valid }, ++ Range { from: '\u{1d372}', to: '\u{1d3ff}', mapping: Disallowed }, ++ Range { from: '\u{1d400}', to: '\u{1d400}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d401}', to: '\u{1d401}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d402}', to: '\u{1d402}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d403}', to: '\u{1d403}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d404}', to: '\u{1d404}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d405}', to: '\u{1d405}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d406}', to: '\u{1d406}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d407}', to: '\u{1d407}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d408}', to: '\u{1d408}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d409}', to: '\u{1d409}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d40a}', to: '\u{1d40a}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d40b}', to: '\u{1d40b}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d40c}', to: '\u{1d40c}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d40d}', to: '\u{1d40d}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d40e}', to: '\u{1d40e}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d40f}', to: '\u{1d40f}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d410}', to: '\u{1d410}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d411}', to: '\u{1d411}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ 
Range { from: '\u{1d412}', to: '\u{1d412}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d413}', to: '\u{1d413}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d414}', to: '\u{1d414}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d415}', to: '\u{1d415}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d416}', to: '\u{1d416}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d417}', to: '\u{1d417}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d418}', to: '\u{1d418}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d419}', to: '\u{1d419}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d41a}', to: '\u{1d41a}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d41b}', to: '\u{1d41b}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d41c}', to: '\u{1d41c}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d41d}', to: '\u{1d41d}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d41e}', to: '\u{1d41e}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d41f}', to: '\u{1d41f}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d420}', to: '\u{1d420}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d421}', to: '\u{1d421}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d422}', to: '\u{1d422}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d423}', to: '\u{1d423}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d424}', to: '\u{1d424}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d425}', to: '\u{1d425}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d426}', to: '\u{1d426}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d427}', to: '\u{1d427}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d428}', to: '\u{1d428}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d429}', to: '\u{1d429}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d42a}', to: '\u{1d42a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d42b}', to: '\u{1d42b}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d42c}', to: '\u{1d42c}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d42d}', to: '\u{1d42d}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d42e}', to: '\u{1d42e}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d42f}', 
to: '\u{1d42f}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d430}', to: '\u{1d430}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d431}', to: '\u{1d431}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d432}', to: '\u{1d432}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d433}', to: '\u{1d433}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d434}', to: '\u{1d434}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d435}', to: '\u{1d435}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d436}', to: '\u{1d436}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d437}', to: '\u{1d437}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d438}', to: '\u{1d438}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d439}', to: '\u{1d439}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d43a}', to: '\u{1d43a}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d43b}', to: '\u{1d43b}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d43c}', to: '\u{1d43c}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d43d}', to: '\u{1d43d}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, 
byte_len: 1 }) }, ++ Range { from: '\u{1d43e}', to: '\u{1d43e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d43f}', to: '\u{1d43f}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d440}', to: '\u{1d440}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d441}', to: '\u{1d441}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d442}', to: '\u{1d442}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d443}', to: '\u{1d443}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d444}', to: '\u{1d444}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d445}', to: '\u{1d445}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d446}', to: '\u{1d446}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d447}', to: '\u{1d447}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d448}', to: '\u{1d448}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d449}', to: '\u{1d449}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d44a}', to: '\u{1d44a}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d44b}', to: '\u{1d44b}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d44c}', to: '\u{1d44c}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d44d}', to: '\u{1d44d}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d44e}', to: '\u{1d44e}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d44f}', to: '\u{1d44f}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d450}', to: '\u{1d450}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d451}', to: '\u{1d451}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d452}', to: '\u{1d452}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d453}', to: '\u{1d453}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d454}', to: '\u{1d454}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d455}', to: '\u{1d455}', mapping: Disallowed }, ++ Range { from: '\u{1d456}', to: '\u{1d456}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d457}', to: '\u{1d457}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d458}', to: '\u{1d458}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d459}', to: '\u{1d459}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d45a}', to: '\u{1d45a}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d45b}', to: '\u{1d45b}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d45c}', to: '\u{1d45c}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d45d}', to: '\u{1d45d}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d45e}', to: '\u{1d45e}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d45f}', to: '\u{1d45f}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d460}', to: '\u{1d460}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d461}', to: '\u{1d461}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d462}', to: '\u{1d462}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d463}', to: '\u{1d463}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d464}', to: '\u{1d464}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d465}', to: '\u{1d465}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d466}', to: '\u{1d466}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d467}', to: '\u{1d467}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d468}', to: '\u{1d468}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d469}', to: '\u{1d469}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ 
Range { from: '\u{1d46a}', to: '\u{1d46a}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d46b}', to: '\u{1d46b}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d46c}', to: '\u{1d46c}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d46d}', to: '\u{1d46d}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d46e}', to: '\u{1d46e}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d46f}', to: '\u{1d46f}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d470}', to: '\u{1d470}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d471}', to: '\u{1d471}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d472}', to: '\u{1d472}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d473}', to: '\u{1d473}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d474}', to: '\u{1d474}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d475}', to: '\u{1d475}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d476}', to: '\u{1d476}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d477}', to: '\u{1d477}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d478}', to: '\u{1d478}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d479}', to: '\u{1d479}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d47a}', to: '\u{1d47a}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d47b}', to: '\u{1d47b}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d47c}', to: '\u{1d47c}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d47d}', to: '\u{1d47d}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d47e}', to: '\u{1d47e}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d47f}', to: '\u{1d47f}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d480}', to: '\u{1d480}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d481}', to: '\u{1d481}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d482}', to: '\u{1d482}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d483}', to: '\u{1d483}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d484}', to: '\u{1d484}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d485}', to: '\u{1d485}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d486}', to: '\u{1d486}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d487}', 
to: '\u{1d487}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d488}', to: '\u{1d488}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d489}', to: '\u{1d489}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d48a}', to: '\u{1d48a}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d48b}', to: '\u{1d48b}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d48c}', to: '\u{1d48c}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d48d}', to: '\u{1d48d}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d48e}', to: '\u{1d48e}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d48f}', to: '\u{1d48f}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d490}', to: '\u{1d490}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d491}', to: '\u{1d491}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d492}', to: '\u{1d492}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d493}', to: '\u{1d493}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d494}', to: '\u{1d494}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d495}', to: '\u{1d495}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 
0, byte_len: 1 }) }, ++ Range { from: '\u{1d496}', to: '\u{1d496}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d497}', to: '\u{1d497}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d498}', to: '\u{1d498}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d499}', to: '\u{1d499}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d49a}', to: '\u{1d49a}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d49b}', to: '\u{1d49b}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d49c}', to: '\u{1d49c}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d49d}', to: '\u{1d49d}', mapping: Disallowed }, ++ Range { from: '\u{1d49e}', to: '\u{1d49e}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d49f}', to: '\u{1d49f}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4a0}', to: '\u{1d4a1}', mapping: Disallowed }, ++ Range { from: '\u{1d4a2}', to: '\u{1d4a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4a3}', to: '\u{1d4a4}', mapping: Disallowed }, ++ Range { from: '\u{1d4a5}', to: '\u{1d4a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4a6}', to: '\u{1d4a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4a7}', to: '\u{1d4a8}', mapping: Disallowed }, ++ Range { from: '\u{1d4a9}', to: '\u{1d4a9}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4aa}', to: '\u{1d4aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ab}', to: '\u{1d4ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ac}', to: '\u{1d4ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ad}', to: '\u{1d4ad}', mapping: Disallowed }, ++ Range { from: '\u{1d4ae}', to: '\u{1d4ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4af}', to: '\u{1d4af}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4b0}', to: '\u{1d4b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4b1}', to: '\u{1d4b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4b2}', to: '\u{1d4b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4b3}', to: '\u{1d4b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4b4}', to: '\u{1d4b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4b5}', to: '\u{1d4b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4b6}', to: '\u{1d4b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4b7}', to: '\u{1d4b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4b8}', to: '\u{1d4b8}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4b9}', to: '\u{1d4b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ba}', to: '\u{1d4ba}', mapping: Disallowed }, ++ Range { from: '\u{1d4bb}', to: '\u{1d4bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4bc}', to: '\u{1d4bc}', mapping: Disallowed }, ++ Range { from: '\u{1d4bd}', to: '\u{1d4bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4be}', to: '\u{1d4be}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4bf}', to: '\u{1d4bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4c0}', to: '\u{1d4c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4c1}', to: '\u{1d4c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4c2}', to: '\u{1d4c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4c3}', to: '\u{1d4c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4c4}', to: '\u{1d4c4}', mapping: Disallowed }, ++ Range { from: '\u{1d4c5}', to: '\u{1d4c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4c6}', to: '\u{1d4c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4c7}', to: '\u{1d4c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4c8}', to: '\u{1d4c8}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4c9}', to: '\u{1d4c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ca}', to: '\u{1d4ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4cb}', to: '\u{1d4cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4cc}', to: '\u{1d4cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4cd}', to: '\u{1d4cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ce}', to: '\u{1d4ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4cf}', to: '\u{1d4cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4d0}', to: '\u{1d4d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4d1}', to: '\u{1d4d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4d2}', to: '\u{1d4d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4d3}', to: '\u{1d4d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4d4}', to: '\u{1d4d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4d5}', to: '\u{1d4d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4d6}', to: '\u{1d4d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ 
Range { from: '\u{1d4d7}', to: '\u{1d4d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4d8}', to: '\u{1d4d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4d9}', to: '\u{1d4d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4da}', to: '\u{1d4da}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4db}', to: '\u{1d4db}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4dc}', to: '\u{1d4dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4dd}', to: '\u{1d4dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4de}', to: '\u{1d4de}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4df}', to: '\u{1d4df}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4e0}', to: '\u{1d4e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4e1}', to: '\u{1d4e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4e2}', to: '\u{1d4e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4e3}', to: '\u{1d4e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4e4}', to: '\u{1d4e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4e5}', to: '\u{1d4e5}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4e6}', to: '\u{1d4e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4e7}', to: '\u{1d4e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4e8}', to: '\u{1d4e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4e9}', to: '\u{1d4e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ea}', to: '\u{1d4ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4eb}', to: '\u{1d4eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ec}', to: '\u{1d4ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ed}', to: '\u{1d4ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ee}', to: '\u{1d4ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ef}', to: '\u{1d4ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4f0}', to: '\u{1d4f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4f1}', to: '\u{1d4f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4f2}', to: '\u{1d4f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4f3}', to: '\u{1d4f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4f4}', to: 
'\u{1d4f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4f5}', to: '\u{1d4f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4f6}', to: '\u{1d4f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4f7}', to: '\u{1d4f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4f8}', to: '\u{1d4f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4f9}', to: '\u{1d4f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4fa}', to: '\u{1d4fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4fb}', to: '\u{1d4fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4fc}', to: '\u{1d4fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4fd}', to: '\u{1d4fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4fe}', to: '\u{1d4fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d4ff}', to: '\u{1d4ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d500}', to: '\u{1d500}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d501}', to: '\u{1d501}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d502}', to: '\u{1d502}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 
0, byte_len: 1 }) }, ++ Range { from: '\u{1d503}', to: '\u{1d503}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d504}', to: '\u{1d504}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d505}', to: '\u{1d505}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d506}', to: '\u{1d506}', mapping: Disallowed }, ++ Range { from: '\u{1d507}', to: '\u{1d507}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d508}', to: '\u{1d508}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d509}', to: '\u{1d509}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d50a}', to: '\u{1d50a}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d50b}', to: '\u{1d50c}', mapping: Disallowed }, ++ Range { from: '\u{1d50d}', to: '\u{1d50d}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d50e}', to: '\u{1d50e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d50f}', to: '\u{1d50f}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d510}', to: '\u{1d510}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d511}', to: '\u{1d511}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d512}', to: '\u{1d512}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d513}', to: '\u{1d513}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d514}', to: '\u{1d514}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d515}', to: '\u{1d515}', mapping: Disallowed }, ++ Range { from: '\u{1d516}', to: '\u{1d516}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d517}', to: '\u{1d517}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d518}', to: '\u{1d518}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d519}', to: '\u{1d519}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d51a}', to: '\u{1d51a}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d51b}', to: '\u{1d51b}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d51c}', to: '\u{1d51c}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d51d}', to: '\u{1d51d}', mapping: Disallowed }, ++ Range { from: '\u{1d51e}', to: '\u{1d51e}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d51f}', to: '\u{1d51f}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d520}', to: '\u{1d520}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d521}', to: '\u{1d521}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d522}', to: '\u{1d522}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ 
Range { from: '\u{1d523}', to: '\u{1d523}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d524}', to: '\u{1d524}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d525}', to: '\u{1d525}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d526}', to: '\u{1d526}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d527}', to: '\u{1d527}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d528}', to: '\u{1d528}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d529}', to: '\u{1d529}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d52a}', to: '\u{1d52a}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d52b}', to: '\u{1d52b}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d52c}', to: '\u{1d52c}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d52d}', to: '\u{1d52d}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d52e}', to: '\u{1d52e}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d52f}', to: '\u{1d52f}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d530}', to: '\u{1d530}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d531}', to: '\u{1d531}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d532}', to: '\u{1d532}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d533}', to: '\u{1d533}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d534}', to: '\u{1d534}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d535}', to: '\u{1d535}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d536}', to: '\u{1d536}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d537}', to: '\u{1d537}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d538}', to: '\u{1d538}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d539}', to: '\u{1d539}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d53a}', to: '\u{1d53a}', mapping: Disallowed }, ++ Range { from: '\u{1d53b}', to: '\u{1d53b}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d53c}', to: '\u{1d53c}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d53d}', to: '\u{1d53d}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d53e}', to: '\u{1d53e}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d53f}', to: '\u{1d53f}', mapping: Disallowed }, ++ Range { from: '\u{1d540}', to: '\u{1d540}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d541}', 
to: '\u{1d541}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d542}', to: '\u{1d542}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d543}', to: '\u{1d543}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d544}', to: '\u{1d544}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d545}', to: '\u{1d545}', mapping: Disallowed }, ++ Range { from: '\u{1d546}', to: '\u{1d546}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d547}', to: '\u{1d549}', mapping: Disallowed }, ++ Range { from: '\u{1d54a}', to: '\u{1d54a}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d54b}', to: '\u{1d54b}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d54c}', to: '\u{1d54c}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d54d}', to: '\u{1d54d}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d54e}', to: '\u{1d54e}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d54f}', to: '\u{1d54f}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d550}', to: '\u{1d550}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d551}', to: '\u{1d551}', mapping: Disallowed }, ++ Range { from: '\u{1d552}', to: '\u{1d552}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d553}', to: 
'\u{1d553}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d554}', to: '\u{1d554}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d555}', to: '\u{1d555}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d556}', to: '\u{1d556}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d557}', to: '\u{1d557}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d558}', to: '\u{1d558}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d559}', to: '\u{1d559}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d55a}', to: '\u{1d55a}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d55b}', to: '\u{1d55b}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d55c}', to: '\u{1d55c}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d55d}', to: '\u{1d55d}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d55e}', to: '\u{1d55e}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d55f}', to: '\u{1d55f}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d560}', to: '\u{1d560}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d561}', to: '\u{1d561}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, 
byte_len: 1 }) }, ++ Range { from: '\u{1d562}', to: '\u{1d562}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d563}', to: '\u{1d563}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d564}', to: '\u{1d564}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d565}', to: '\u{1d565}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d566}', to: '\u{1d566}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d567}', to: '\u{1d567}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d568}', to: '\u{1d568}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d569}', to: '\u{1d569}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d56a}', to: '\u{1d56a}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d56b}', to: '\u{1d56b}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d56c}', to: '\u{1d56c}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d56d}', to: '\u{1d56d}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d56e}', to: '\u{1d56e}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d56f}', to: '\u{1d56f}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d570}', to: '\u{1d570}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d571}', to: '\u{1d571}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d572}', to: '\u{1d572}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d573}', to: '\u{1d573}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d574}', to: '\u{1d574}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d575}', to: '\u{1d575}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d576}', to: '\u{1d576}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d577}', to: '\u{1d577}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d578}', to: '\u{1d578}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d579}', to: '\u{1d579}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d57a}', to: '\u{1d57a}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d57b}', to: '\u{1d57b}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d57c}', to: '\u{1d57c}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d57d}', to: '\u{1d57d}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d57e}', to: '\u{1d57e}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ 
Range { from: '\u{1d57f}', to: '\u{1d57f}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d580}', to: '\u{1d580}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d581}', to: '\u{1d581}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d582}', to: '\u{1d582}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d583}', to: '\u{1d583}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d584}', to: '\u{1d584}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d585}', to: '\u{1d585}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d586}', to: '\u{1d586}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d587}', to: '\u{1d587}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d588}', to: '\u{1d588}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d589}', to: '\u{1d589}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d58a}', to: '\u{1d58a}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d58b}', to: '\u{1d58b}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d58c}', to: '\u{1d58c}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d58d}', to: '\u{1d58d}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d58e}', to: '\u{1d58e}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d58f}', to: '\u{1d58f}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d590}', to: '\u{1d590}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d591}', to: '\u{1d591}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d592}', to: '\u{1d592}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d593}', to: '\u{1d593}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d594}', to: '\u{1d594}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d595}', to: '\u{1d595}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d596}', to: '\u{1d596}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d597}', to: '\u{1d597}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d598}', to: '\u{1d598}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d599}', to: '\u{1d599}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d59a}', to: '\u{1d59a}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d59b}', to: '\u{1d59b}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d59c}', 
to: '\u{1d59c}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d59d}', to: '\u{1d59d}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d59e}', to: '\u{1d59e}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d59f}', to: '\u{1d59f}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5a0}', to: '\u{1d5a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5a1}', to: '\u{1d5a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5a2}', to: '\u{1d5a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5a3}', to: '\u{1d5a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5a4}', to: '\u{1d5a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5a5}', to: '\u{1d5a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5a6}', to: '\u{1d5a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5a7}', to: '\u{1d5a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5a8}', to: '\u{1d5a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5a9}', to: '\u{1d5a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5aa}', to: '\u{1d5aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, 
byte_len: 1 }) }, ++ Range { from: '\u{1d5ab}', to: '\u{1d5ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ac}', to: '\u{1d5ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ad}', to: '\u{1d5ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ae}', to: '\u{1d5ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5af}', to: '\u{1d5af}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5b0}', to: '\u{1d5b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5b1}', to: '\u{1d5b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5b2}', to: '\u{1d5b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5b3}', to: '\u{1d5b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5b4}', to: '\u{1d5b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5b5}', to: '\u{1d5b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5b6}', to: '\u{1d5b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5b7}', to: '\u{1d5b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5b8}', to: '\u{1d5b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5b9}', to: '\u{1d5b9}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ba}', to: '\u{1d5ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5bb}', to: '\u{1d5bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5bc}', to: '\u{1d5bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5bd}', to: '\u{1d5bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5be}', to: '\u{1d5be}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5bf}', to: '\u{1d5bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5c0}', to: '\u{1d5c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5c1}', to: '\u{1d5c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5c2}', to: '\u{1d5c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5c3}', to: '\u{1d5c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5c4}', to: '\u{1d5c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5c5}', to: '\u{1d5c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5c6}', to: '\u{1d5c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5c7}', to: '\u{1d5c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { 
from: '\u{1d5c8}', to: '\u{1d5c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5c9}', to: '\u{1d5c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ca}', to: '\u{1d5ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5cb}', to: '\u{1d5cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5cc}', to: '\u{1d5cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5cd}', to: '\u{1d5cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ce}', to: '\u{1d5ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5cf}', to: '\u{1d5cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5d0}', to: '\u{1d5d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5d1}', to: '\u{1d5d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5d2}', to: '\u{1d5d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5d3}', to: '\u{1d5d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5d4}', to: '\u{1d5d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5d5}', to: '\u{1d5d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5d6}', to: '\u{1d5d6}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5d7}', to: '\u{1d5d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5d8}', to: '\u{1d5d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5d9}', to: '\u{1d5d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5da}', to: '\u{1d5da}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5db}', to: '\u{1d5db}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5dc}', to: '\u{1d5dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5dd}', to: '\u{1d5dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5de}', to: '\u{1d5de}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5df}', to: '\u{1d5df}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5e0}', to: '\u{1d5e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5e1}', to: '\u{1d5e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5e2}', to: '\u{1d5e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5e3}', to: '\u{1d5e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5e4}', to: '\u{1d5e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5e5}', to: 
'\u{1d5e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5e6}', to: '\u{1d5e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5e7}', to: '\u{1d5e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5e8}', to: '\u{1d5e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5e9}', to: '\u{1d5e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ea}', to: '\u{1d5ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5eb}', to: '\u{1d5eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ec}', to: '\u{1d5ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ed}', to: '\u{1d5ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ee}', to: '\u{1d5ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ef}', to: '\u{1d5ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5f0}', to: '\u{1d5f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5f1}', to: '\u{1d5f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5f2}', to: '\u{1d5f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5f3}', to: '\u{1d5f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, 
byte_len: 1 }) }, ++ Range { from: '\u{1d5f4}', to: '\u{1d5f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5f5}', to: '\u{1d5f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5f6}', to: '\u{1d5f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5f7}', to: '\u{1d5f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5f8}', to: '\u{1d5f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5f9}', to: '\u{1d5f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5fa}', to: '\u{1d5fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5fb}', to: '\u{1d5fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5fc}', to: '\u{1d5fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5fd}', to: '\u{1d5fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5fe}', to: '\u{1d5fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d5ff}', to: '\u{1d5ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d600}', to: '\u{1d600}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d601}', to: '\u{1d601}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d602}', to: '\u{1d602}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d603}', to: '\u{1d603}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d604}', to: '\u{1d604}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d605}', to: '\u{1d605}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d606}', to: '\u{1d606}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d607}', to: '\u{1d607}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d608}', to: '\u{1d608}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d609}', to: '\u{1d609}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d60a}', to: '\u{1d60a}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d60b}', to: '\u{1d60b}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d60c}', to: '\u{1d60c}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d60d}', to: '\u{1d60d}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d60e}', to: '\u{1d60e}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d60f}', to: '\u{1d60f}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d610}', to: '\u{1d610}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range 
{ from: '\u{1d611}', to: '\u{1d611}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d612}', to: '\u{1d612}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d613}', to: '\u{1d613}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d614}', to: '\u{1d614}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d615}', to: '\u{1d615}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d616}', to: '\u{1d616}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d617}', to: '\u{1d617}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d618}', to: '\u{1d618}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d619}', to: '\u{1d619}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d61a}', to: '\u{1d61a}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d61b}', to: '\u{1d61b}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d61c}', to: '\u{1d61c}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d61d}', to: '\u{1d61d}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d61e}', to: '\u{1d61e}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d61f}', to: '\u{1d61f}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d620}', to: '\u{1d620}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d621}', to: '\u{1d621}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d622}', to: '\u{1d622}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d623}', to: '\u{1d623}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d624}', to: '\u{1d624}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d625}', to: '\u{1d625}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d626}', to: '\u{1d626}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d627}', to: '\u{1d627}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d628}', to: '\u{1d628}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d629}', to: '\u{1d629}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d62a}', to: '\u{1d62a}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d62b}', to: '\u{1d62b}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d62c}', to: '\u{1d62c}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d62d}', to: '\u{1d62d}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d62e}', to: 
'\u{1d62e}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d62f}', to: '\u{1d62f}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d630}', to: '\u{1d630}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d631}', to: '\u{1d631}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d632}', to: '\u{1d632}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d633}', to: '\u{1d633}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d634}', to: '\u{1d634}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d635}', to: '\u{1d635}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d636}', to: '\u{1d636}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d637}', to: '\u{1d637}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d638}', to: '\u{1d638}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d639}', to: '\u{1d639}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d63a}', to: '\u{1d63a}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d63b}', to: '\u{1d63b}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d63c}', to: '\u{1d63c}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 
0, byte_len: 1 }) }, ++ Range { from: '\u{1d63d}', to: '\u{1d63d}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d63e}', to: '\u{1d63e}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d63f}', to: '\u{1d63f}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d640}', to: '\u{1d640}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d641}', to: '\u{1d641}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d642}', to: '\u{1d642}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d643}', to: '\u{1d643}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d644}', to: '\u{1d644}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d645}', to: '\u{1d645}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d646}', to: '\u{1d646}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d647}', to: '\u{1d647}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d648}', to: '\u{1d648}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d649}', to: '\u{1d649}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d64a}', to: '\u{1d64a}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d64b}', to: '\u{1d64b}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d64c}', to: '\u{1d64c}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d64d}', to: '\u{1d64d}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d64e}', to: '\u{1d64e}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d64f}', to: '\u{1d64f}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d650}', to: '\u{1d650}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d651}', to: '\u{1d651}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d652}', to: '\u{1d652}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d653}', to: '\u{1d653}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d654}', to: '\u{1d654}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d655}', to: '\u{1d655}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d656}', to: '\u{1d656}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d657}', to: '\u{1d657}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d658}', to: '\u{1d658}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d659}', to: '\u{1d659}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ 
Range { from: '\u{1d65a}', to: '\u{1d65a}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d65b}', to: '\u{1d65b}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d65c}', to: '\u{1d65c}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d65d}', to: '\u{1d65d}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d65e}', to: '\u{1d65e}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d65f}', to: '\u{1d65f}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d660}', to: '\u{1d660}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d661}', to: '\u{1d661}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d662}', to: '\u{1d662}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d663}', to: '\u{1d663}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d664}', to: '\u{1d664}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d665}', to: '\u{1d665}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d666}', to: '\u{1d666}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d667}', to: '\u{1d667}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d668}', to: '\u{1d668}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d669}', to: '\u{1d669}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d66a}', to: '\u{1d66a}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d66b}', to: '\u{1d66b}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d66c}', to: '\u{1d66c}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d66d}', to: '\u{1d66d}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d66e}', to: '\u{1d66e}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d66f}', to: '\u{1d66f}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d670}', to: '\u{1d670}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d671}', to: '\u{1d671}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d672}', to: '\u{1d672}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d673}', to: '\u{1d673}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d674}', to: '\u{1d674}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d675}', to: '\u{1d675}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d676}', to: '\u{1d676}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d677}', to: 
'\u{1d677}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d678}', to: '\u{1d678}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d679}', to: '\u{1d679}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d67a}', to: '\u{1d67a}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d67b}', to: '\u{1d67b}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d67c}', to: '\u{1d67c}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d67d}', to: '\u{1d67d}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d67e}', to: '\u{1d67e}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d67f}', to: '\u{1d67f}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d680}', to: '\u{1d680}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d681}', to: '\u{1d681}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d682}', to: '\u{1d682}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d683}', to: '\u{1d683}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d684}', to: '\u{1d684}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d685}', to: '\u{1d685}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, 
byte_len: 1 }) }, ++ Range { from: '\u{1d686}', to: '\u{1d686}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d687}', to: '\u{1d687}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d688}', to: '\u{1d688}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d689}', to: '\u{1d689}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d68a}', to: '\u{1d68a}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d68b}', to: '\u{1d68b}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d68c}', to: '\u{1d68c}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d68d}', to: '\u{1d68d}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d68e}', to: '\u{1d68e}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d68f}', to: '\u{1d68f}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d690}', to: '\u{1d690}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d691}', to: '\u{1d691}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d692}', to: '\u{1d692}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d693}', to: '\u{1d693}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d694}', to: '\u{1d694}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d695}', to: '\u{1d695}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d696}', to: '\u{1d696}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d697}', to: '\u{1d697}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d698}', to: '\u{1d698}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d699}', to: '\u{1d699}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d69a}', to: '\u{1d69a}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d69b}', to: '\u{1d69b}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d69c}', to: '\u{1d69c}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d69d}', to: '\u{1d69d}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d69e}', to: '\u{1d69e}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d69f}', to: '\u{1d69f}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d6a0}', to: '\u{1d6a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d6a1}', to: '\u{1d6a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d6a2}', to: '\u{1d6a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, 
++ Range { from: '\u{1d6a3}', to: '\u{1d6a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d6a4}', to: '\u{1d6a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 41, byte_len: 2 }) }, ++ Range { from: '\u{1d6a5}', to: '\u{1d6a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 41, byte_len: 2 }) }, ++ Range { from: '\u{1d6a6}', to: '\u{1d6a7}', mapping: Disallowed }, ++ Range { from: '\u{1d6a8}', to: '\u{1d6a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6a9}', to: '\u{1d6a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6aa}', to: '\u{1d6aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6ab}', to: '\u{1d6ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6ac}', to: '\u{1d6ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6ad}', to: '\u{1d6ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6ae}', to: '\u{1d6ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6af}', to: '\u{1d6af}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6b0}', to: '\u{1d6b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d6b1}', to: '\u{1d6b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6b2}', to: '\u{1d6b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, ++ 
Range { from: '\u{1d6b3}', to: '\u{1d6b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d6b4}', to: '\u{1d6b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6b5}', to: '\u{1d6b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6b6}', to: '\u{1d6b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6b7}', to: '\u{1d6b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6b8}', to: '\u{1d6b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6b9}', to: '\u{1d6b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6ba}', to: '\u{1d6ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6bb}', to: '\u{1d6bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6bc}', to: '\u{1d6bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6bd}', to: '\u{1d6bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6be}', to: '\u{1d6be}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6bf}', to: '\u{1d6bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6c0}', to: '\u{1d6c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6c1}', to: '\u{1d6c1}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }) }, ++ Range { from: '\u{1d6c2}', to: '\u{1d6c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6c3}', to: '\u{1d6c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6c4}', to: '\u{1d6c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6c5}', to: '\u{1d6c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6c6}', to: '\u{1d6c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6c7}', to: '\u{1d6c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6c8}', to: '\u{1d6c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6c9}', to: '\u{1d6c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6ca}', to: '\u{1d6ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d6cb}', to: '\u{1d6cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6cc}', to: '\u{1d6cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6cd}', to: '\u{1d6cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d6ce}', to: '\u{1d6ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6cf}', to: '\u{1d6cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: 
'\u{1d6d0}', to: '\u{1d6d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6d1}', to: '\u{1d6d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6d2}', to: '\u{1d6d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6d3}', to: '\u{1d6d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6d5}', to: '\u{1d6d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6d6}', to: '\u{1d6d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6d7}', to: '\u{1d6d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6d8}', to: '\u{1d6d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6d9}', to: '\u{1d6d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6da}', to: '\u{1d6da}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6db}', to: '\u{1d6db}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }) }, ++ Range { from: '\u{1d6dc}', to: '\u{1d6dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6dd}', to: '\u{1d6dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6de}', to: '\u{1d6de}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6df}', to: '\u{1d6df}', mapping: Mapped(StringTableSlice { byte_start_lo: 
60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6e0}', to: '\u{1d6e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6e1}', to: '\u{1d6e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6e2}', to: '\u{1d6e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6e3}', to: '\u{1d6e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6e4}', to: '\u{1d6e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6e5}', to: '\u{1d6e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6e6}', to: '\u{1d6e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6e7}', to: '\u{1d6e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6e8}', to: '\u{1d6e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6e9}', to: '\u{1d6e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6ea}', to: '\u{1d6ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d6eb}', to: '\u{1d6eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6ec}', to: '\u{1d6ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6ed}', to: '\u{1d6ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d6ee}', to: 
'\u{1d6ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6ef}', to: '\u{1d6ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6f0}', to: '\u{1d6f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6f1}', to: '\u{1d6f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6f2}', to: '\u{1d6f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6f3}', to: '\u{1d6f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6f4}', to: '\u{1d6f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6f5}', to: '\u{1d6f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6f6}', to: '\u{1d6f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6f7}', to: '\u{1d6f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6f8}', to: '\u{1d6f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6f9}', to: '\u{1d6f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6fa}', to: '\u{1d6fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6fb}', to: '\u{1d6fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }) }, ++ Range { from: '\u{1d6fc}', to: '\u{1d6fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, 
byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6fd}', to: '\u{1d6fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6fe}', to: '\u{1d6fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d6ff}', to: '\u{1d6ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d700}', to: '\u{1d700}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d701}', to: '\u{1d701}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d702}', to: '\u{1d702}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d703}', to: '\u{1d703}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d704}', to: '\u{1d704}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d705}', to: '\u{1d705}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d706}', to: '\u{1d706}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d707}', to: '\u{1d707}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d708}', to: '\u{1d708}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d709}', to: '\u{1d709}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d70a}', to: '\u{1d70a}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d70b}', to: 
'\u{1d70b}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d70c}', to: '\u{1d70c}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d70d}', to: '\u{1d70e}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d70f}', to: '\u{1d70f}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d710}', to: '\u{1d710}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d711}', to: '\u{1d711}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d712}', to: '\u{1d712}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d713}', to: '\u{1d713}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d714}', to: '\u{1d714}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d715}', to: '\u{1d715}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }) }, ++ Range { from: '\u{1d716}', to: '\u{1d716}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d717}', to: '\u{1d717}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d718}', to: '\u{1d718}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d719}', to: '\u{1d719}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d71a}', to: '\u{1d71a}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, 
byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d71b}', to: '\u{1d71b}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d71c}', to: '\u{1d71c}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d71d}', to: '\u{1d71d}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d71e}', to: '\u{1d71e}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d71f}', to: '\u{1d71f}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d720}', to: '\u{1d720}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d721}', to: '\u{1d721}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d722}', to: '\u{1d722}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d723}', to: '\u{1d723}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d724}', to: '\u{1d724}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d725}', to: '\u{1d725}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d726}', to: '\u{1d726}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d727}', to: '\u{1d727}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d728}', to: '\u{1d728}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d729}', to: 
'\u{1d729}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d72a}', to: '\u{1d72a}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d72b}', to: '\u{1d72b}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d72c}', to: '\u{1d72c}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d72d}', to: '\u{1d72d}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d72e}', to: '\u{1d72e}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d72f}', to: '\u{1d72f}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d730}', to: '\u{1d730}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d731}', to: '\u{1d731}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d732}', to: '\u{1d732}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d733}', to: '\u{1d733}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d734}', to: '\u{1d734}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d735}', to: '\u{1d735}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }) }, ++ Range { from: '\u{1d736}', to: '\u{1d736}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d737}', to: '\u{1d737}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, 
byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d738}', to: '\u{1d738}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d739}', to: '\u{1d739}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d73a}', to: '\u{1d73a}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d73b}', to: '\u{1d73b}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d73c}', to: '\u{1d73c}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d73d}', to: '\u{1d73d}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d73e}', to: '\u{1d73e}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d73f}', to: '\u{1d73f}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d740}', to: '\u{1d740}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d741}', to: '\u{1d741}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d742}', to: '\u{1d742}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d743}', to: '\u{1d743}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d744}', to: '\u{1d744}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d745}', to: '\u{1d745}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d746}', to: 
'\u{1d746}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d747}', to: '\u{1d748}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d749}', to: '\u{1d749}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d74a}', to: '\u{1d74a}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d74b}', to: '\u{1d74b}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d74c}', to: '\u{1d74c}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d74d}', to: '\u{1d74d}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d74e}', to: '\u{1d74e}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d74f}', to: '\u{1d74f}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }) }, ++ Range { from: '\u{1d750}', to: '\u{1d750}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d751}', to: '\u{1d751}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d752}', to: '\u{1d752}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d753}', to: '\u{1d753}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d754}', to: '\u{1d754}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d755}', to: '\u{1d755}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, 
byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d756}', to: '\u{1d756}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d757}', to: '\u{1d757}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d758}', to: '\u{1d758}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d759}', to: '\u{1d759}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d75a}', to: '\u{1d75a}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d75b}', to: '\u{1d75b}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d75c}', to: '\u{1d75c}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d75d}', to: '\u{1d75d}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d75e}', to: '\u{1d75e}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d75f}', to: '\u{1d75f}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d760}', to: '\u{1d760}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d761}', to: '\u{1d761}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d762}', to: '\u{1d762}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d763}', to: '\u{1d763}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d764}', to: 
'\u{1d764}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d765}', to: '\u{1d765}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d766}', to: '\u{1d766}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d767}', to: '\u{1d767}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d768}', to: '\u{1d768}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d769}', to: '\u{1d769}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d76a}', to: '\u{1d76a}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d76b}', to: '\u{1d76b}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d76c}', to: '\u{1d76c}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d76d}', to: '\u{1d76d}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d76e}', to: '\u{1d76e}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d76f}', to: '\u{1d76f}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }) }, ++ Range { from: '\u{1d770}', to: '\u{1d770}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d771}', to: '\u{1d771}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d772}', to: '\u{1d772}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, 
byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d773}', to: '\u{1d773}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d774}', to: '\u{1d774}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d775}', to: '\u{1d775}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d776}', to: '\u{1d776}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d777}', to: '\u{1d777}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d778}', to: '\u{1d778}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d779}', to: '\u{1d779}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d77a}', to: '\u{1d77a}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d77b}', to: '\u{1d77b}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d77c}', to: '\u{1d77c}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d77d}', to: '\u{1d77d}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d77e}', to: '\u{1d77e}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d77f}', to: '\u{1d77f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d780}', to: '\u{1d780}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d781}', to: 
'\u{1d782}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d783}', to: '\u{1d783}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d784}', to: '\u{1d784}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d785}', to: '\u{1d785}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d786}', to: '\u{1d786}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d787}', to: '\u{1d787}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d788}', to: '\u{1d788}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d789}', to: '\u{1d789}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }) }, ++ Range { from: '\u{1d78a}', to: '\u{1d78a}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d78b}', to: '\u{1d78b}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d78c}', to: '\u{1d78c}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d78d}', to: '\u{1d78d}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d78e}', to: '\u{1d78e}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d78f}', to: '\u{1d78f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d790}', to: '\u{1d790}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, 
byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d791}', to: '\u{1d791}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d792}', to: '\u{1d792}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d793}', to: '\u{1d793}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d794}', to: '\u{1d794}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d795}', to: '\u{1d795}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d796}', to: '\u{1d796}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d797}', to: '\u{1d797}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d798}', to: '\u{1d798}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d799}', to: '\u{1d799}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d79a}', to: '\u{1d79a}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d79b}', to: '\u{1d79b}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d79c}', to: '\u{1d79c}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d79d}', to: '\u{1d79d}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d79e}', to: '\u{1d79e}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d79f}', to: 
'\u{1d79f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7a0}', to: '\u{1d7a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7a1}', to: '\u{1d7a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7a2}', to: '\u{1d7a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7a3}', to: '\u{1d7a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7a4}', to: '\u{1d7a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7a5}', to: '\u{1d7a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7a6}', to: '\u{1d7a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7a7}', to: '\u{1d7a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7a8}', to: '\u{1d7a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7a9}', to: '\u{1d7a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 41, byte_len: 3 }) }, ++ Range { from: '\u{1d7aa}', to: '\u{1d7aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7ab}', to: '\u{1d7ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7ac}', to: '\u{1d7ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7ad}', to: '\u{1d7ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, 
byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7ae}', to: '\u{1d7ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7af}', to: '\u{1d7af}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7b0}', to: '\u{1d7b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7b1}', to: '\u{1d7b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7b2}', to: '\u{1d7b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 1, byte_len: 2 }) }, ++ Range { from: '\u{1d7b3}', to: '\u{1d7b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7b4}', to: '\u{1d7b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7b5}', to: '\u{1d7b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1d7b6}', to: '\u{1d7b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7b7}', to: '\u{1d7b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7b8}', to: '\u{1d7b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7b9}', to: '\u{1d7b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7ba}', to: '\u{1d7ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7bb}', to: '\u{1d7bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7bd}', to: 
'\u{1d7bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7be}', to: '\u{1d7be}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7bf}', to: '\u{1d7bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7c0}', to: '\u{1d7c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7c1}', to: '\u{1d7c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7c2}', to: '\u{1d7c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7c3}', to: '\u{1d7c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 41, byte_len: 3 }) }, ++ Range { from: '\u{1d7c4}', to: '\u{1d7c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7c5}', to: '\u{1d7c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7c6}', to: '\u{1d7c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7c7}', to: '\u{1d7c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7c8}', to: '\u{1d7c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7c9}', to: '\u{1d7c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7ca}', to: '\u{1d7cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 2, byte_len: 2 }) }, ++ Range { from: '\u{1d7cc}', to: '\u{1d7cd}', mapping: Disallowed }, ++ Range { from: '\u{1d7ce}', to: 
'\u{1d7ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7cf}', to: '\u{1d7cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7d0}', to: '\u{1d7d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7d1}', to: '\u{1d7d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7d2}', to: '\u{1d7d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7d3}', to: '\u{1d7d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7d4}', to: '\u{1d7d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7d5}', to: '\u{1d7d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7d6}', to: '\u{1d7d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7d7}', to: '\u{1d7d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7d8}', to: '\u{1d7d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7d9}', to: '\u{1d7d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7da}', to: '\u{1d7da}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7db}', to: '\u{1d7db}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7dc}', to: '\u{1d7dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, 
byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7dd}', to: '\u{1d7dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7de}', to: '\u{1d7de}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7df}', to: '\u{1d7df}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7e0}', to: '\u{1d7e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7e1}', to: '\u{1d7e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7e2}', to: '\u{1d7e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7e3}', to: '\u{1d7e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7e4}', to: '\u{1d7e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7e5}', to: '\u{1d7e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7e6}', to: '\u{1d7e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7e7}', to: '\u{1d7e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7e8}', to: '\u{1d7e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7e9}', to: '\u{1d7e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7ea}', to: '\u{1d7ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7eb}', to: 
'\u{1d7eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7ec}', to: '\u{1d7ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7ed}', to: '\u{1d7ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7ee}', to: '\u{1d7ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7ef}', to: '\u{1d7ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7f0}', to: '\u{1d7f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7f1}', to: '\u{1d7f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7f2}', to: '\u{1d7f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7f3}', to: '\u{1d7f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7f4}', to: '\u{1d7f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7f5}', to: '\u{1d7f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7f6}', to: '\u{1d7f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7f7}', to: '\u{1d7f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7f8}', to: '\u{1d7f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7f9}', to: '\u{1d7f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, 
byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1d7fa}', to: '\u{1d7fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7fb}', to: '\u{1d7fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7fc}', to: '\u{1d7fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7fd}', to: '\u{1d7fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7fe}', to: '\u{1d7fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 169, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d7ff}', to: '\u{1d7ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 8, byte_len: 1 }) }, ++ Range { from: '\u{1d800}', to: '\u{1da8b}', mapping: Valid }, ++ Range { from: '\u{1da8c}', to: '\u{1da9a}', mapping: Disallowed }, ++ Range { from: '\u{1da9b}', to: '\u{1da9f}', mapping: Valid }, ++ Range { from: '\u{1daa0}', to: '\u{1daa0}', mapping: Disallowed }, ++ Range { from: '\u{1daa1}', to: '\u{1daaf}', mapping: Valid }, ++ Range { from: '\u{1dab0}', to: '\u{1dfff}', mapping: Disallowed }, ++ Range { from: '\u{1e000}', to: '\u{1e006}', mapping: Valid }, ++ Range { from: '\u{1e007}', to: '\u{1e007}', mapping: Disallowed }, ++ Range { from: '\u{1e008}', to: '\u{1e018}', mapping: Valid }, ++ Range { from: '\u{1e019}', to: '\u{1e01a}', mapping: Disallowed }, ++ Range { from: '\u{1e01b}', to: '\u{1e021}', mapping: Valid }, ++ Range { from: '\u{1e022}', to: '\u{1e022}', mapping: Disallowed }, ++ Range { from: '\u{1e023}', to: '\u{1e024}', mapping: Valid }, ++ Range { from: '\u{1e025}', to: '\u{1e025}', mapping: Disallowed }, ++ Range { from: '\u{1e026}', to: '\u{1e02a}', mapping: Valid }, ++ Range { from: '\u{1e02b}', to: '\u{1e7ff}', mapping: Disallowed }, ++ Range { from: '\u{1e800}', to: '\u{1e8c4}', mapping: 
Valid }, ++ Range { from: '\u{1e8c5}', to: '\u{1e8c6}', mapping: Disallowed }, ++ Range { from: '\u{1e8c7}', to: '\u{1e8d6}', mapping: Valid }, ++ Range { from: '\u{1e8d7}', to: '\u{1e8ff}', mapping: Disallowed }, ++ Range { from: '\u{1e900}', to: '\u{1e900}', mapping: Mapped(StringTableSlice { byte_start_lo: 166, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e901}', to: '\u{1e901}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e902}', to: '\u{1e902}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e903}', to: '\u{1e903}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e904}', to: '\u{1e904}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e905}', to: '\u{1e905}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e906}', to: '\u{1e906}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e907}', to: '\u{1e907}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e908}', to: '\u{1e908}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e909}', to: '\u{1e909}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e90a}', to: '\u{1e90a}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e90b}', to: '\u{1e90b}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e90c}', to: '\u{1e90c}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 
41, byte_len: 4 }) }, ++ Range { from: '\u{1e90d}', to: '\u{1e90d}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e90e}', to: '\u{1e90e}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e90f}', to: '\u{1e90f}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e910}', to: '\u{1e910}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e911}', to: '\u{1e911}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e912}', to: '\u{1e912}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e913}', to: '\u{1e913}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e914}', to: '\u{1e914}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e915}', to: '\u{1e915}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e916}', to: '\u{1e916}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 41, byte_len: 4 }) }, ++ Range { from: '\u{1e917}', to: '\u{1e917}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 42, byte_len: 4 }) }, ++ Range { from: '\u{1e918}', to: '\u{1e918}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 42, byte_len: 4 }) }, ++ Range { from: '\u{1e919}', to: '\u{1e919}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 42, byte_len: 4 }) }, ++ Range { from: '\u{1e91a}', to: '\u{1e91a}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 42, byte_len: 4 }) }, ++ Range { from: '\u{1e91b}', to: 
'\u{1e91b}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 42, byte_len: 4 }) }, ++ Range { from: '\u{1e91c}', to: '\u{1e91c}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 42, byte_len: 4 }) }, ++ Range { from: '\u{1e91d}', to: '\u{1e91d}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 42, byte_len: 4 }) }, ++ Range { from: '\u{1e91e}', to: '\u{1e91e}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 42, byte_len: 4 }) }, ++ Range { from: '\u{1e91f}', to: '\u{1e91f}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 42, byte_len: 4 }) }, ++ Range { from: '\u{1e920}', to: '\u{1e920}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 42, byte_len: 4 }) }, ++ Range { from: '\u{1e921}', to: '\u{1e921}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 42, byte_len: 4 }) }, ++ Range { from: '\u{1e922}', to: '\u{1e94a}', mapping: Valid }, ++ Range { from: '\u{1e94b}', to: '\u{1e94f}', mapping: Disallowed }, ++ Range { from: '\u{1e950}', to: '\u{1e959}', mapping: Valid }, ++ Range { from: '\u{1e95a}', to: '\u{1e95d}', mapping: Disallowed }, ++ Range { from: '\u{1e95e}', to: '\u{1e95f}', mapping: Valid }, ++ Range { from: '\u{1e960}', to: '\u{1edff}', mapping: Disallowed }, ++ Range { from: '\u{1ee00}', to: '\u{1ee00}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee01}', to: '\u{1ee01}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee02}', to: '\u{1ee02}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee03}', to: '\u{1ee03}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee04}', to: '\u{1ee04}', mapping: Disallowed }, ++ Range { from: '\u{1ee05}', to: 
'\u{1ee05}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee06}', to: '\u{1ee06}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee07}', to: '\u{1ee07}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee08}', to: '\u{1ee08}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee09}', to: '\u{1ee09}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee0a}', to: '\u{1ee0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee0b}', to: '\u{1ee0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee0c}', to: '\u{1ee0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee0d}', to: '\u{1ee0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee0e}', to: '\u{1ee0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee0f}', to: '\u{1ee0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee10}', to: '\u{1ee10}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee11}', to: '\u{1ee11}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee12}', to: '\u{1ee12}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee13}', to: '\u{1ee13}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, 
byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee14}', to: '\u{1ee14}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee15}', to: '\u{1ee15}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee16}', to: '\u{1ee16}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee17}', to: '\u{1ee17}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee18}', to: '\u{1ee18}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee19}', to: '\u{1ee19}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee1a}', to: '\u{1ee1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee1b}', to: '\u{1ee1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee1c}', to: '\u{1ee1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1ee1d}', to: '\u{1ee1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{1ee1e}', to: '\u{1ee1e}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1ee1f}', to: '\u{1ee1f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1ee20}', to: '\u{1ee20}', mapping: Disallowed }, ++ Range { from: '\u{1ee21}', to: '\u{1ee21}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee22}', to: '\u{1ee22}', mapping: Mapped(StringTableSlice { byte_start_lo: 
252, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee23}', to: '\u{1ee23}', mapping: Disallowed }, ++ Range { from: '\u{1ee24}', to: '\u{1ee24}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee25}', to: '\u{1ee26}', mapping: Disallowed }, ++ Range { from: '\u{1ee27}', to: '\u{1ee27}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee28}', to: '\u{1ee28}', mapping: Disallowed }, ++ Range { from: '\u{1ee29}', to: '\u{1ee29}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee2a}', to: '\u{1ee2a}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee2b}', to: '\u{1ee2b}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee2c}', to: '\u{1ee2c}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee2d}', to: '\u{1ee2d}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee2e}', to: '\u{1ee2e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee2f}', to: '\u{1ee2f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee30}', to: '\u{1ee30}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee31}', to: '\u{1ee31}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee32}', to: '\u{1ee32}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee33}', to: '\u{1ee33}', mapping: Disallowed }, ++ Range { from: 
'\u{1ee34}', to: '\u{1ee34}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee35}', to: '\u{1ee35}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee36}', to: '\u{1ee36}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee37}', to: '\u{1ee37}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee38}', to: '\u{1ee38}', mapping: Disallowed }, ++ Range { from: '\u{1ee39}', to: '\u{1ee39}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee3a}', to: '\u{1ee3a}', mapping: Disallowed }, ++ Range { from: '\u{1ee3b}', to: '\u{1ee3b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee3c}', to: '\u{1ee41}', mapping: Disallowed }, ++ Range { from: '\u{1ee42}', to: '\u{1ee42}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee43}', to: '\u{1ee46}', mapping: Disallowed }, ++ Range { from: '\u{1ee47}', to: '\u{1ee47}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee48}', to: '\u{1ee48}', mapping: Disallowed }, ++ Range { from: '\u{1ee49}', to: '\u{1ee49}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee4a}', to: '\u{1ee4a}', mapping: Disallowed }, ++ Range { from: '\u{1ee4b}', to: '\u{1ee4b}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee4c}', to: '\u{1ee4c}', mapping: Disallowed }, ++ Range { from: '\u{1ee4d}', to: '\u{1ee4d}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) }, ++ 
Range { from: '\u{1ee4e}', to: '\u{1ee4e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee4f}', to: '\u{1ee4f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee50}', to: '\u{1ee50}', mapping: Disallowed }, ++ Range { from: '\u{1ee51}', to: '\u{1ee51}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee52}', to: '\u{1ee52}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee53}', to: '\u{1ee53}', mapping: Disallowed }, ++ Range { from: '\u{1ee54}', to: '\u{1ee54}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee55}', to: '\u{1ee56}', mapping: Disallowed }, ++ Range { from: '\u{1ee57}', to: '\u{1ee57}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee58}', to: '\u{1ee58}', mapping: Disallowed }, ++ Range { from: '\u{1ee59}', to: '\u{1ee59}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee5a}', to: '\u{1ee5a}', mapping: Disallowed }, ++ Range { from: '\u{1ee5b}', to: '\u{1ee5b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee5c}', to: '\u{1ee5c}', mapping: Disallowed }, ++ Range { from: '\u{1ee5d}', to: '\u{1ee5d}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 31, byte_len: 2 }) }, ++ Range { from: '\u{1ee5e}', to: '\u{1ee5e}', mapping: Disallowed }, ++ Range { from: '\u{1ee5f}', to: '\u{1ee5f}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1ee60}', to: '\u{1ee60}', mapping: Disallowed }, ++ Range { from: '\u{1ee61}', to: '\u{1ee61}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee62}', to: '\u{1ee62}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee63}', to: '\u{1ee63}', mapping: Disallowed }, ++ Range { from: '\u{1ee64}', to: '\u{1ee64}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee65}', to: '\u{1ee66}', mapping: Disallowed }, ++ Range { from: '\u{1ee67}', to: '\u{1ee67}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee68}', to: '\u{1ee68}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee69}', to: '\u{1ee69}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee6a}', to: '\u{1ee6a}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee6b}', to: '\u{1ee6b}', mapping: Disallowed }, ++ Range { from: '\u{1ee6c}', to: '\u{1ee6c}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee6d}', to: '\u{1ee6d}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee6e}', to: '\u{1ee6e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee6f}', to: '\u{1ee6f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee70}', to: '\u{1ee70}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee71}', to: '\u{1ee71}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee72}', to: '\u{1ee72}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee73}', to: '\u{1ee73}', mapping: Disallowed }, ++ Range { from: '\u{1ee74}', to: '\u{1ee74}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee75}', to: '\u{1ee75}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee76}', to: '\u{1ee76}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee77}', to: '\u{1ee77}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee78}', to: '\u{1ee78}', mapping: Disallowed }, ++ Range { from: '\u{1ee79}', to: '\u{1ee79}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee7a}', to: '\u{1ee7a}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee7b}', to: '\u{1ee7b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee7c}', to: '\u{1ee7c}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1ee7d}', to: '\u{1ee7d}', mapping: Disallowed }, ++ Range { from: '\u{1ee7e}', to: '\u{1ee7e}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1ee7f}', to: '\u{1ee7f}', mapping: Disallowed }, ++ Range { from: '\u{1ee80}', to: '\u{1ee80}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee81}', to: '\u{1ee81}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee82}', to: '\u{1ee82}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, 
byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee83}', to: '\u{1ee83}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee84}', to: '\u{1ee84}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee85}', to: '\u{1ee85}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee86}', to: '\u{1ee86}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee87}', to: '\u{1ee87}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee88}', to: '\u{1ee88}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee89}', to: '\u{1ee89}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee8a}', to: '\u{1ee8a}', mapping: Disallowed }, ++ Range { from: '\u{1ee8b}', to: '\u{1ee8b}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee8c}', to: '\u{1ee8c}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee8d}', to: '\u{1ee8d}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee8e}', to: '\u{1ee8e}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee8f}', to: '\u{1ee8f}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee90}', to: '\u{1ee90}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee91}', to: '\u{1ee91}', mapping: Mapped(StringTableSlice { byte_start_lo: 
14, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee92}', to: '\u{1ee92}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee93}', to: '\u{1ee93}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee94}', to: '\u{1ee94}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee95}', to: '\u{1ee95}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee96}', to: '\u{1ee96}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1ee97}', to: '\u{1ee97}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee98}', to: '\u{1ee98}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee99}', to: '\u{1ee99}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee9a}', to: '\u{1ee9a}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee9b}', to: '\u{1ee9b}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1ee9c}', to: '\u{1eea0}', mapping: Disallowed }, ++ Range { from: '\u{1eea1}', to: '\u{1eea1}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1eea2}', to: '\u{1eea2}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1eea3}', to: '\u{1eea3}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eea4}', to: '\u{1eea4}', mapping: Disallowed }, ++ Range { from: 
'\u{1eea5}', to: '\u{1eea5}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eea6}', to: '\u{1eea6}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eea7}', to: '\u{1eea7}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1eea8}', to: '\u{1eea8}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eea9}', to: '\u{1eea9}', mapping: Mapped(StringTableSlice { byte_start_lo: 42, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeaa}', to: '\u{1eeaa}', mapping: Disallowed }, ++ Range { from: '\u{1eeab}', to: '\u{1eeab}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeac}', to: '\u{1eeac}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eead}', to: '\u{1eead}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeae}', to: '\u{1eeae}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeaf}', to: '\u{1eeaf}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeb0}', to: '\u{1eeb0}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeb1}', to: '\u{1eeb1}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeb2}', to: '\u{1eeb2}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeb3}', to: '\u{1eeb3}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { 
from: '\u{1eeb4}', to: '\u{1eeb4}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeb5}', to: '\u{1eeb5}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1eeb6}', to: '\u{1eeb6}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 37, byte_len: 2 }) }, ++ Range { from: '\u{1eeb7}', to: '\u{1eeb7}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeb8}', to: '\u{1eeb8}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeb9}', to: '\u{1eeb9}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eeba}', to: '\u{1eeba}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eebb}', to: '\u{1eebb}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 38, byte_len: 2 }) }, ++ Range { from: '\u{1eebc}', to: '\u{1eeef}', mapping: Disallowed }, ++ Range { from: '\u{1eef0}', to: '\u{1eef1}', mapping: Valid }, ++ Range { from: '\u{1eef2}', to: '\u{1efff}', mapping: Disallowed }, ++ Range { from: '\u{1f000}', to: '\u{1f02b}', mapping: Valid }, ++ Range { from: '\u{1f02c}', to: '\u{1f02f}', mapping: Disallowed }, ++ Range { from: '\u{1f030}', to: '\u{1f093}', mapping: Valid }, ++ Range { from: '\u{1f094}', to: '\u{1f09f}', mapping: Disallowed }, ++ Range { from: '\u{1f0a0}', to: '\u{1f0ae}', mapping: Valid }, ++ Range { from: '\u{1f0af}', to: '\u{1f0b0}', mapping: Disallowed }, ++ Range { from: '\u{1f0b1}', to: '\u{1f0bf}', mapping: Valid }, ++ Range { from: '\u{1f0c0}', to: '\u{1f0c0}', mapping: Disallowed }, ++ Range { from: '\u{1f0c1}', to: '\u{1f0cf}', mapping: Valid }, ++ Range { from: '\u{1f0d0}', to: '\u{1f0d0}', mapping: Disallowed }, ++ Range { from: 
'\u{1f0d1}', to: '\u{1f0f5}', mapping: Valid }, ++ Range { from: '\u{1f0f6}', to: '\u{1f100}', mapping: Disallowed }, ++ Range { from: '\u{1f101}', to: '\u{1f101}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 52, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f102}', to: '\u{1f102}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f103}', to: '\u{1f103}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f104}', to: '\u{1f104}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f105}', to: '\u{1f105}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f106}', to: '\u{1f106}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f107}', to: '\u{1f107}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f108}', to: '\u{1f108}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f109}', to: '\u{1f109}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f10a}', to: '\u{1f10a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f10b}', to: '\u{1f10c}', mapping: Valid }, ++ Range { from: '\u{1f10d}', to: '\u{1f10f}', mapping: Disallowed }, ++ Range { from: '\u{1f110}', to: '\u{1f110}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f111}', to: '\u{1f111}', mapping: 
DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 205, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f112}', to: '\u{1f112}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 208, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f113}', to: '\u{1f113}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 211, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f114}', to: '\u{1f114}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f115}', to: '\u{1f115}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 217, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f116}', to: '\u{1f116}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 220, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f117}', to: '\u{1f117}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 223, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f118}', to: '\u{1f118}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f119}', to: '\u{1f119}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f11a}', to: '\u{1f11a}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f11b}', to: '\u{1f11b}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f11c}', to: '\u{1f11c}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f11d}', to: '\u{1f11d}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f11e}', to: '\u{1f11e}', mapping: 
DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f11f}', to: '\u{1f11f}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f120}', to: '\u{1f120}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f121}', to: '\u{1f121}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 9, byte_len: 3 }) }, ++ Range { from: '\u{1f122}', to: '\u{1f122}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{1f123}', to: '\u{1f123}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{1f124}', to: '\u{1f124}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{1f125}', to: '\u{1f125}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{1f126}', to: '\u{1f126}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{1f127}', to: '\u{1f127}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{1f128}', to: '\u{1f128}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{1f129}', to: '\u{1f129}', mapping: DisallowedStd3Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 10, byte_len: 3 }) }, ++ Range { from: '\u{1f12a}', to: '\u{1f12a}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 42, byte_len: 7 }) }, ++ Range { from: '\u{1f12b}', to: '\u{1f12b}', mapping: Mapped(StringTableSlice { byte_start_lo: 
2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f12c}', to: '\u{1f12c}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f12d}', to: '\u{1f12d}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{1f12e}', to: '\u{1f12e}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f12f}', to: '\u{1f12f}', mapping: Disallowed }, ++ Range { from: '\u{1f130}', to: '\u{1f130}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f131}', to: '\u{1f131}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f132}', to: '\u{1f132}', mapping: Mapped(StringTableSlice { byte_start_lo: 2, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f133}', to: '\u{1f133}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f134}', to: '\u{1f134}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f135}', to: '\u{1f135}', mapping: Mapped(StringTableSlice { byte_start_lo: 5, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f136}', to: '\u{1f136}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f137}', to: '\u{1f137}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f138}', to: '\u{1f138}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f139}', to: '\u{1f139}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f13a}', to: '\u{1f13a}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 
0, byte_len: 1 }) }, ++ Range { from: '\u{1f13b}', to: '\u{1f13b}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f13c}', to: '\u{1f13c}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f13d}', to: '\u{1f13d}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f13e}', to: '\u{1f13e}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f13f}', to: '\u{1f13f}', mapping: Mapped(StringTableSlice { byte_start_lo: 15, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f140}', to: '\u{1f140}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f141}', to: '\u{1f141}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f142}', to: '\u{1f142}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f143}', to: '\u{1f143}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f144}', to: '\u{1f144}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f145}', to: '\u{1f145}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f146}', to: '\u{1f146}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f147}', to: '\u{1f147}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f148}', to: '\u{1f148}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f149}', to: '\u{1f149}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 0, byte_len: 1 }) }, ++ Range { from: '\u{1f14a}', to: '\u{1f14a}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f14b}', to: '\u{1f14b}', mapping: Mapped(StringTableSlice { byte_start_lo: 39, byte_start_hi: 23, byte_len: 2 }) }, ++ Range { from: '\u{1f14c}', to: '\u{1f14c}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f14d}', to: '\u{1f14d}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 0, byte_len: 2 }) }, ++ Range { from: '\u{1f14e}', to: '\u{1f14e}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f14f}', to: '\u{1f14f}', mapping: Mapped(StringTableSlice { byte_start_lo: 88, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f150}', to: '\u{1f169}', mapping: Valid }, ++ Range { from: '\u{1f16a}', to: '\u{1f16a}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f16b}', to: '\u{1f16b}', mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f16c}', to: '\u{1f16f}', mapping: Disallowed }, ++ Range { from: '\u{1f170}', to: '\u{1f18f}', mapping: Valid }, ++ Range { from: '\u{1f190}', to: '\u{1f190}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 42, byte_len: 2 }) }, ++ Range { from: '\u{1f191}', to: '\u{1f1ac}', mapping: Valid }, ++ Range { from: '\u{1f1ad}', to: '\u{1f1e5}', mapping: Disallowed }, ++ Range { from: '\u{1f1e6}', to: '\u{1f1ff}', mapping: Valid }, ++ Range { from: '\u{1f200}', to: '\u{1f200}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 42, byte_len: 6 }) }, ++ Range { from: '\u{1f201}', to: '\u{1f201}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 42, byte_len: 6 }) }, 
++ Range { from: '\u{1f202}', to: '\u{1f202}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{1f203}', to: '\u{1f20f}', mapping: Disallowed }, ++ Range { from: '\u{1f210}', to: '\u{1f210}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{1f211}', to: '\u{1f211}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f212}', to: '\u{1f212}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f213}', to: '\u{1f213}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f214}', to: '\u{1f214}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{1f215}', to: '\u{1f215}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f216}', to: '\u{1f216}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f217}', to: '\u{1f217}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{1f218}', to: '\u{1f218}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f219}', to: '\u{1f219}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f21a}', to: '\u{1f21a}', mapping: Mapped(StringTableSlice { byte_start_lo: 129, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f21b}', to: '\u{1f21b}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{1f21c}', to: '\u{1f21c}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 42, 
byte_len: 3 }) }, ++ Range { from: '\u{1f21d}', to: '\u{1f21d}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f21e}', to: '\u{1f21e}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f21f}', to: '\u{1f21f}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f220}', to: '\u{1f220}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f221}', to: '\u{1f221}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f222}', to: '\u{1f222}', mapping: Mapped(StringTableSlice { byte_start_lo: 172, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{1f223}', to: '\u{1f223}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f224}', to: '\u{1f224}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f225}', to: '\u{1f225}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f226}', to: '\u{1f226}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f227}', to: '\u{1f227}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f228}', to: '\u{1f228}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f229}', to: '\u{1f229}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{1f22a}', to: '\u{1f22a}', mapping: Mapped(StringTableSlice { byte_start_lo: 58, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{1f22b}', to: 
'\u{1f22b}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f22c}', to: '\u{1f22c}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{1f22d}', to: '\u{1f22d}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 15, byte_len: 3 }) }, ++ Range { from: '\u{1f22e}', to: '\u{1f22e}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{1f22f}', to: '\u{1f22f}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f230}', to: '\u{1f230}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{1f231}', to: '\u{1f231}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f232}', to: '\u{1f232}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f233}', to: '\u{1f233}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f234}', to: '\u{1f234}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f235}', to: '\u{1f235}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f236}', to: '\u{1f236}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 17, byte_len: 3 }) }, ++ Range { from: '\u{1f237}', to: '\u{1f237}', mapping: Mapped(StringTableSlice { byte_start_lo: 94, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{1f238}', to: '\u{1f238}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f239}', to: '\u{1f239}', mapping: Mapped(StringTableSlice { 
byte_start_lo: 192, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f23a}', to: '\u{1f23a}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f23b}', to: '\u{1f23b}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{1f23c}', to: '\u{1f23f}', mapping: Disallowed }, ++ Range { from: '\u{1f240}', to: '\u{1f240}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 42, byte_len: 9 }) }, ++ Range { from: '\u{1f241}', to: '\u{1f241}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 42, byte_len: 9 }) }, ++ Range { from: '\u{1f242}', to: '\u{1f242}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 42, byte_len: 9 }) }, ++ Range { from: '\u{1f243}', to: '\u{1f243}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 42, byte_len: 9 }) }, ++ Range { from: '\u{1f244}', to: '\u{1f244}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 42, byte_len: 9 }) }, ++ Range { from: '\u{1f245}', to: '\u{1f245}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 42, byte_len: 9 }) }, ++ Range { from: '\u{1f246}', to: '\u{1f246}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 42, byte_len: 9 }) }, ++ Range { from: '\u{1f247}', to: '\u{1f247}', mapping: Mapped(StringTableSlice { byte_start_lo: 8, byte_start_hi: 43, byte_len: 9 }) }, ++ Range { from: '\u{1f248}', to: '\u{1f248}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 43, byte_len: 9 }) }, ++ Range { from: '\u{1f249}', to: '\u{1f24f}', mapping: Disallowed }, ++ Range { from: '\u{1f250}', to: '\u{1f250}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{1f251}', to: '\u{1f251}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 43, byte_len: 3 }) }, 
++ Range { from: '\u{1f252}', to: '\u{1f25f}', mapping: Disallowed }, ++ Range { from: '\u{1f260}', to: '\u{1f265}', mapping: Valid }, ++ Range { from: '\u{1f266}', to: '\u{1f2ff}', mapping: Disallowed }, ++ Range { from: '\u{1f300}', to: '\u{1f6d4}', mapping: Valid }, ++ Range { from: '\u{1f6d5}', to: '\u{1f6df}', mapping: Disallowed }, ++ Range { from: '\u{1f6e0}', to: '\u{1f6ec}', mapping: Valid }, ++ Range { from: '\u{1f6ed}', to: '\u{1f6ef}', mapping: Disallowed }, ++ Range { from: '\u{1f6f0}', to: '\u{1f6f8}', mapping: Valid }, ++ Range { from: '\u{1f6f9}', to: '\u{1f6ff}', mapping: Disallowed }, ++ Range { from: '\u{1f700}', to: '\u{1f773}', mapping: Valid }, ++ Range { from: '\u{1f774}', to: '\u{1f77f}', mapping: Disallowed }, ++ Range { from: '\u{1f780}', to: '\u{1f7d4}', mapping: Valid }, ++ Range { from: '\u{1f7d5}', to: '\u{1f7ff}', mapping: Disallowed }, ++ Range { from: '\u{1f800}', to: '\u{1f80b}', mapping: Valid }, ++ Range { from: '\u{1f80c}', to: '\u{1f80f}', mapping: Disallowed }, ++ Range { from: '\u{1f810}', to: '\u{1f847}', mapping: Valid }, ++ Range { from: '\u{1f848}', to: '\u{1f84f}', mapping: Disallowed }, ++ Range { from: '\u{1f850}', to: '\u{1f859}', mapping: Valid }, ++ Range { from: '\u{1f85a}', to: '\u{1f85f}', mapping: Disallowed }, ++ Range { from: '\u{1f860}', to: '\u{1f887}', mapping: Valid }, ++ Range { from: '\u{1f888}', to: '\u{1f88f}', mapping: Disallowed }, ++ Range { from: '\u{1f890}', to: '\u{1f8ad}', mapping: Valid }, ++ Range { from: '\u{1f8ae}', to: '\u{1f8ff}', mapping: Disallowed }, ++ Range { from: '\u{1f900}', to: '\u{1f90b}', mapping: Valid }, ++ Range { from: '\u{1f90c}', to: '\u{1f90f}', mapping: Disallowed }, ++ Range { from: '\u{1f910}', to: '\u{1f93e}', mapping: Valid }, ++ Range { from: '\u{1f93f}', to: '\u{1f93f}', mapping: Disallowed }, ++ Range { from: '\u{1f940}', to: '\u{1f94c}', mapping: Valid }, ++ Range { from: '\u{1f94d}', to: '\u{1f94f}', mapping: Disallowed }, ++ Range { from: '\u{1f950}', to: 
'\u{1f96b}', mapping: Valid }, ++ Range { from: '\u{1f96c}', to: '\u{1f97f}', mapping: Disallowed }, ++ Range { from: '\u{1f980}', to: '\u{1f997}', mapping: Valid }, ++ Range { from: '\u{1f998}', to: '\u{1f9bf}', mapping: Disallowed }, ++ Range { from: '\u{1f9c0}', to: '\u{1f9c0}', mapping: Valid }, ++ Range { from: '\u{1f9c1}', to: '\u{1f9cf}', mapping: Disallowed }, ++ Range { from: '\u{1f9d0}', to: '\u{1f9e6}', mapping: Valid }, ++ Range { from: '\u{1f9e7}', to: '\u{1ffff}', mapping: Disallowed }, ++ Range { from: '\u{20000}', to: '\u{2a6d6}', mapping: Valid }, ++ Range { from: '\u{2a6d7}', to: '\u{2a6ff}', mapping: Disallowed }, ++ Range { from: '\u{2a700}', to: '\u{2b734}', mapping: Valid }, ++ Range { from: '\u{2b735}', to: '\u{2b73f}', mapping: Disallowed }, ++ Range { from: '\u{2b740}', to: '\u{2b81d}', mapping: Valid }, ++ Range { from: '\u{2b81e}', to: '\u{2b81f}', mapping: Disallowed }, ++ Range { from: '\u{2b820}', to: '\u{2cea1}', mapping: Valid }, ++ Range { from: '\u{2cea2}', to: '\u{2ceaf}', mapping: Disallowed }, ++ Range { from: '\u{2ceb0}', to: '\u{2ebe0}', mapping: Valid }, ++ Range { from: '\u{2ebe1}', to: '\u{2f7ff}', mapping: Disallowed }, ++ Range { from: '\u{2f800}', to: '\u{2f800}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f801}', to: '\u{2f801}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f802}', to: '\u{2f802}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f803}', to: '\u{2f803}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 43, byte_len: 4 }) }, ++ Range { from: '\u{2f804}', to: '\u{2f804}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f805}', to: '\u{2f805}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 29, 
byte_len: 3 }) }, ++ Range { from: '\u{2f806}', to: '\u{2f806}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f807}', to: '\u{2f807}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f808}', to: '\u{2f808}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f809}', to: '\u{2f809}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f80a}', to: '\u{2f80a}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f80b}', to: '\u{2f80b}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f80c}', to: '\u{2f80c}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f80d}', to: '\u{2f80d}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 43, byte_len: 4 }) }, ++ Range { from: '\u{2f80e}', to: '\u{2f80e}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f80f}', to: '\u{2f80f}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f810}', to: '\u{2f810}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f811}', to: '\u{2f811}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f812}', to: '\u{2f812}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 43, byte_len: 4 }) }, ++ Range { from: '\u{2f813}', to: '\u{2f813}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f814}', to: '\u{2f814}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f815}', to: '\u{2f815}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{2f816}', to: '\u{2f816}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 43, byte_len: 4 }) }, ++ Range { from: '\u{2f817}', to: '\u{2f817}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f818}', to: '\u{2f818}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f819}', to: '\u{2f819}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f81a}', to: '\u{2f81a}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f81b}', to: '\u{2f81b}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f81c}', to: '\u{2f81c}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 43, byte_len: 4 }) }, ++ Range { from: '\u{2f81d}', to: '\u{2f81d}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 11, byte_len: 3 }) }, ++ Range { from: '\u{2f81e}', to: '\u{2f81e}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f81f}', to: '\u{2f81f}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f820}', to: '\u{2f820}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f821}', to: '\u{2f821}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f822}', to: '\u{2f822}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, 
byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{2f823}', to: '\u{2f823}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f824}', to: '\u{2f824}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f825}', to: '\u{2f825}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f826}', to: '\u{2f826}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f827}', to: '\u{2f827}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f828}', to: '\u{2f828}', mapping: Mapped(StringTableSlice { byte_start_lo: 30, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f829}', to: '\u{2f829}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f82a}', to: '\u{2f82a}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f82b}', to: '\u{2f82b}', mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{2f82c}', to: '\u{2f82c}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f82d}', to: '\u{2f82d}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f82e}', to: '\u{2f82e}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f82f}', to: '\u{2f82f}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f830}', to: '\u{2f830}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: 
'\u{2f831}', to: '\u{2f833}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f834}', to: '\u{2f834}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 43, byte_len: 4 }) }, ++ Range { from: '\u{2f835}', to: '\u{2f835}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f836}', to: '\u{2f836}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f837}', to: '\u{2f837}', mapping: Mapped(StringTableSlice { byte_start_lo: 158, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f838}', to: '\u{2f838}', mapping: Mapped(StringTableSlice { byte_start_lo: 161, byte_start_hi: 43, byte_len: 4 }) }, ++ Range { from: '\u{2f839}', to: '\u{2f839}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f83a}', to: '\u{2f83a}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f83b}', to: '\u{2f83b}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f83c}', to: '\u{2f83c}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f83d}', to: '\u{2f83d}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f83e}', to: '\u{2f83e}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f83f}', to: '\u{2f83f}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f840}', to: '\u{2f840}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f841}', to: '\u{2f841}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f842}', to: '\u{2f842}', mapping: Mapped(StringTableSlice { byte_start_lo: 192, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f843}', to: '\u{2f843}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f844}', to: '\u{2f844}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f845}', to: '\u{2f846}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f847}', to: '\u{2f847}', mapping: Mapped(StringTableSlice { byte_start_lo: 36, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f848}', to: '\u{2f848}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f849}', to: '\u{2f849}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f84a}', to: '\u{2f84a}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f84b}', to: '\u{2f84b}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f84c}', to: '\u{2f84c}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f84d}', to: '\u{2f84d}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f84e}', to: '\u{2f84e}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f84f}', to: '\u{2f84f}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f850}', to: '\u{2f850}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, 
byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{2f851}', to: '\u{2f851}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f852}', to: '\u{2f852}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f853}', to: '\u{2f853}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f854}', to: '\u{2f854}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f855}', to: '\u{2f855}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f856}', to: '\u{2f856}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f857}', to: '\u{2f857}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f858}', to: '\u{2f858}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f859}', to: '\u{2f859}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 43, byte_len: 4 }) }, ++ Range { from: '\u{2f85a}', to: '\u{2f85a}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 43, byte_len: 3 }) }, ++ Range { from: '\u{2f85b}', to: '\u{2f85b}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f85c}', to: '\u{2f85c}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f85d}', to: '\u{2f85d}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 42, byte_len: 3 }) }, ++ Range { from: '\u{2f85e}', to: '\u{2f85e}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: 
'\u{2f85f}', to: '\u{2f85f}', mapping: Mapped(StringTableSlice { byte_start_lo: 9, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f860}', to: '\u{2f860}', mapping: Mapped(StringTableSlice { byte_start_lo: 12, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f861}', to: '\u{2f861}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f862}', to: '\u{2f862}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f863}', to: '\u{2f863}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f864}', to: '\u{2f864}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f865}', to: '\u{2f865}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f866}', to: '\u{2f866}', mapping: Mapped(StringTableSlice { byte_start_lo: 32, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f867}', to: '\u{2f867}', mapping: Mapped(StringTableSlice { byte_start_lo: 35, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f868}', to: '\u{2f868}', mapping: Disallowed }, ++ Range { from: '\u{2f869}', to: '\u{2f869}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f86a}', to: '\u{2f86b}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f86c}', to: '\u{2f86c}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f86d}', to: '\u{2f86d}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f86e}', to: '\u{2f86e}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { 
from: '\u{2f86f}', to: '\u{2f86f}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{2f870}', to: '\u{2f870}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f871}', to: '\u{2f871}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f872}', to: '\u{2f872}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f873}', to: '\u{2f873}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f874}', to: '\u{2f874}', mapping: Disallowed }, ++ Range { from: '\u{2f875}', to: '\u{2f875}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f876}', to: '\u{2f876}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f877}', to: '\u{2f877}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f878}', to: '\u{2f878}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f879}', to: '\u{2f879}', mapping: Mapped(StringTableSlice { byte_start_lo: 73, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f87a}', to: '\u{2f87a}', mapping: Mapped(StringTableSlice { byte_start_lo: 76, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f87b}', to: '\u{2f87b}', mapping: Mapped(StringTableSlice { byte_start_lo: 79, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f87c}', to: '\u{2f87c}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f87d}', to: '\u{2f87d}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { 
from: '\u{2f87e}', to: '\u{2f87e}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f87f}', to: '\u{2f87f}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f880}', to: '\u{2f880}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f881}', to: '\u{2f881}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f882}', to: '\u{2f882}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f883}', to: '\u{2f883}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f884}', to: '\u{2f884}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f885}', to: '\u{2f885}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f886}', to: '\u{2f886}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f887}', to: '\u{2f887}', mapping: Mapped(StringTableSlice { byte_start_lo: 117, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f888}', to: '\u{2f888}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f889}', to: '\u{2f889}', mapping: Mapped(StringTableSlice { byte_start_lo: 123, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f88a}', to: '\u{2f88a}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f88b}', to: '\u{2f88b}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f88c}', to: '\u{2f88c}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f88d}', to: '\u{2f88d}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f88e}', to: '\u{2f88e}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{2f88f}', to: '\u{2f88f}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f890}', to: '\u{2f890}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 12, byte_len: 3 }) }, ++ Range { from: '\u{2f891}', to: '\u{2f892}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f893}', to: '\u{2f893}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f894}', to: '\u{2f895}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f896}', to: '\u{2f896}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f897}', to: '\u{2f897}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f898}', to: '\u{2f898}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f899}', to: '\u{2f899}', mapping: Mapped(StringTableSlice { byte_start_lo: 164, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f89a}', to: '\u{2f89a}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f89b}', to: '\u{2f89b}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f89c}', to: '\u{2f89c}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, 
byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f89d}', to: '\u{2f89d}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f89e}', to: '\u{2f89e}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f89f}', to: '\u{2f89f}', mapping: Mapped(StringTableSlice { byte_start_lo: 182, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8a0}', to: '\u{2f8a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 185, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8a1}', to: '\u{2f8a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8a2}', to: '\u{2f8a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8a3}', to: '\u{2f8a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f8a4}', to: '\u{2f8a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f8a5}', to: '\u{2f8a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8a6}', to: '\u{2f8a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 201, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8a7}', to: '\u{2f8a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8a8}', to: '\u{2f8a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f8a9}', to: '\u{2f8a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 204, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8aa}', to: '\u{2f8aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: 
'\u{2f8ab}', to: '\u{2f8ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f8ac}', to: '\u{2f8ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8ad}', to: '\u{2f8ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8ae}', to: '\u{2f8ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8af}', to: '\u{2f8af}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8b0}', to: '\u{2f8b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f8b1}', to: '\u{2f8b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{2f8b2}', to: '\u{2f8b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8b3}', to: '\u{2f8b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8b4}', to: '\u{2f8b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8b5}', to: '\u{2f8b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8b6}', to: '\u{2f8b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 234, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8b7}', to: '\u{2f8b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8b8}', to: '\u{2f8b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 44, byte_len: 4 }) }, ++ Range { from: '\u{2f8b9}', to: '\u{2f8b9}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8ba}', to: '\u{2f8ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 247, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8bb}', to: '\u{2f8bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 250, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8bc}', to: '\u{2f8bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 253, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f8bd}', to: '\u{2f8bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8be}', to: '\u{2f8be}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f8bf}', to: '\u{2f8bf}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8c0}', to: '\u{2f8c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8c1}', to: '\u{2f8c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8c2}', to: '\u{2f8c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8c3}', to: '\u{2f8c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 19, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8c4}', to: '\u{2f8c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 22, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8c5}', to: '\u{2f8c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 25, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8c6}', to: '\u{2f8c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 28, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8c7}', to: '\u{2f8c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 45, 
byte_len: 3 }) }, ++ Range { from: '\u{2f8c8}', to: '\u{2f8c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f8c9}', to: '\u{2f8c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8ca}', to: '\u{2f8ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f8cb}', to: '\u{2f8cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8cc}', to: '\u{2f8cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8cd}', to: '\u{2f8cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8ce}', to: '\u{2f8ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8cf}', to: '\u{2f8cf}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f8d0}', to: '\u{2f8d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8d1}', to: '\u{2f8d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8d2}', to: '\u{2f8d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 59, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8d3}', to: '\u{2f8d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 62, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8d4}', to: '\u{2f8d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 65, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8d5}', to: '\u{2f8d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8d6}', to: '\u{2f8d6}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 71, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8d7}', to: '\u{2f8d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8d8}', to: '\u{2f8d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{2f8d9}', to: '\u{2f8d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f8da}', to: '\u{2f8da}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8db}', to: '\u{2f8db}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8dc}', to: '\u{2f8dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8dd}', to: '\u{2f8dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f8de}', to: '\u{2f8de}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8df}', to: '\u{2f8df}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8e0}', to: '\u{2f8e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8e1}', to: '\u{2f8e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8e2}', to: '\u{2f8e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f8e3}', to: '\u{2f8e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f8e4}', to: '\u{2f8e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 106, 
byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8e5}', to: '\u{2f8e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8e6}', to: '\u{2f8e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8e7}', to: '\u{2f8e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f8e8}', to: '\u{2f8e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8e9}', to: '\u{2f8e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8ea}', to: '\u{2f8ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8eb}', to: '\u{2f8eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8ec}', to: '\u{2f8ec}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f8ed}', to: '\u{2f8ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 131, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8ee}', to: '\u{2f8ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 134, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8ef}', to: '\u{2f8ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 137, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8f0}', to: '\u{2f8f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 140, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f8f1}', to: '\u{2f8f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 144, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8f2}', to: '\u{2f8f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: 
'\u{2f8f3}', to: '\u{2f8f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 150, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8f4}', to: '\u{2f8f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 153, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8f5}', to: '\u{2f8f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 110, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{2f8f6}', to: '\u{2f8f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8f7}', to: '\u{2f8f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f8f8}', to: '\u{2f8f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f8f9}', to: '\u{2f8f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f8fa}', to: '\u{2f8fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8fb}', to: '\u{2f8fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f8fc}', to: '\u{2f8fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 178, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8fd}', to: '\u{2f8fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 181, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8fe}', to: '\u{2f8fe}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f8ff}', to: '\u{2f8ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f900}', to: '\u{2f900}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f901}', to: '\u{2f901}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 156, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f902}', to: '\u{2f902}', mapping: Mapped(StringTableSlice { byte_start_lo: 103, byte_start_hi: 28, byte_len: 3 }) }, ++ Range { from: '\u{2f903}', to: '\u{2f903}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f904}', to: '\u{2f904}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f905}', to: '\u{2f905}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f906}', to: '\u{2f906}', mapping: Mapped(StringTableSlice { byte_start_lo: 202, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f907}', to: '\u{2f907}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f908}', to: '\u{2f908}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f909}', to: '\u{2f909}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f90a}', to: '\u{2f90a}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f90b}', to: '\u{2f90b}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f90c}', to: '\u{2f90c}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f90d}', to: '\u{2f90d}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f90e}', to: '\u{2f90e}', mapping: Mapped(StringTableSlice { byte_start_lo: 225, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f90f}', to: '\u{2f90f}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, 
byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f910}', to: '\u{2f910}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f911}', to: '\u{2f911}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 45, byte_len: 4 }) }, ++ Range { from: '\u{2f912}', to: '\u{2f912}', mapping: Mapped(StringTableSlice { byte_start_lo: 239, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f913}', to: '\u{2f913}', mapping: Mapped(StringTableSlice { byte_start_lo: 242, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f914}', to: '\u{2f914}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f915}', to: '\u{2f915}', mapping: Mapped(StringTableSlice { byte_start_lo: 245, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f916}', to: '\u{2f916}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f917}', to: '\u{2f917}', mapping: Mapped(StringTableSlice { byte_start_lo: 251, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f918}', to: '\u{2f918}', mapping: Mapped(StringTableSlice { byte_start_lo: 254, byte_start_hi: 45, byte_len: 3 }) }, ++ Range { from: '\u{2f919}', to: '\u{2f919}', mapping: Mapped(StringTableSlice { byte_start_lo: 1, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f91a}', to: '\u{2f91a}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f91b}', to: '\u{2f91b}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f91c}', to: '\u{2f91c}', mapping: Mapped(StringTableSlice { byte_start_lo: 11, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f91d}', to: '\u{2f91d}', mapping: Mapped(StringTableSlice { byte_start_lo: 14, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: 
'\u{2f91e}', to: '\u{2f91e}', mapping: Mapped(StringTableSlice { byte_start_lo: 18, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f91f}', to: '\u{2f91f}', mapping: Disallowed }, ++ Range { from: '\u{2f920}', to: '\u{2f920}', mapping: Mapped(StringTableSlice { byte_start_lo: 21, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f921}', to: '\u{2f921}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f922}', to: '\u{2f922}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f923}', to: '\u{2f923}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f924}', to: '\u{2f924}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f925}', to: '\u{2f925}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f926}', to: '\u{2f926}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f927}', to: '\u{2f927}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f928}', to: '\u{2f928}', mapping: Mapped(StringTableSlice { byte_start_lo: 45, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f929}', to: '\u{2f929}', mapping: Mapped(StringTableSlice { byte_start_lo: 48, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f92a}', to: '\u{2f92a}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f92b}', to: '\u{2f92b}', mapping: Mapped(StringTableSlice { byte_start_lo: 54, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f92c}', to: '\u{2f92d}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { 
from: '\u{2f92e}', to: '\u{2f92e}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f92f}', to: '\u{2f92f}', mapping: Mapped(StringTableSlice { byte_start_lo: 63, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f930}', to: '\u{2f930}', mapping: Mapped(StringTableSlice { byte_start_lo: 120, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f931}', to: '\u{2f931}', mapping: Mapped(StringTableSlice { byte_start_lo: 66, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f932}', to: '\u{2f932}', mapping: Mapped(StringTableSlice { byte_start_lo: 69, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f933}', to: '\u{2f933}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f934}', to: '\u{2f934}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f935}', to: '\u{2f935}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f936}', to: '\u{2f936}', mapping: Mapped(StringTableSlice { byte_start_lo: 82, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f937}', to: '\u{2f937}', mapping: Mapped(StringTableSlice { byte_start_lo: 85, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f938}', to: '\u{2f938}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{2f939}', to: '\u{2f939}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f93a}', to: '\u{2f93a}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f93b}', to: '\u{2f93b}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f93c}', to: '\u{2f93c}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 100, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f93d}', to: '\u{2f93d}', mapping: Mapped(StringTableSlice { byte_start_lo: 104, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f93e}', to: '\u{2f93e}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f93f}', to: '\u{2f93f}', mapping: Mapped(StringTableSlice { byte_start_lo: 111, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f940}', to: '\u{2f940}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f941}', to: '\u{2f941}', mapping: Mapped(StringTableSlice { byte_start_lo: 114, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f942}', to: '\u{2f942}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f943}', to: '\u{2f943}', mapping: Mapped(StringTableSlice { byte_start_lo: 122, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f944}', to: '\u{2f944}', mapping: Mapped(StringTableSlice { byte_start_lo: 126, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f945}', to: '\u{2f945}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f946}', to: '\u{2f947}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f948}', to: '\u{2f948}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f949}', to: '\u{2f949}', mapping: Mapped(StringTableSlice { byte_start_lo: 231, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f94a}', to: '\u{2f94a}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f94b}', to: '\u{2f94b}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, 
byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f94c}', to: '\u{2f94c}', mapping: Mapped(StringTableSlice { byte_start_lo: 142, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f94d}', to: '\u{2f94d}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f94e}', to: '\u{2f94e}', mapping: Mapped(StringTableSlice { byte_start_lo: 149, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f94f}', to: '\u{2f94f}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{2f950}', to: '\u{2f950}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f951}', to: '\u{2f951}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f952}', to: '\u{2f952}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f953}', to: '\u{2f953}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f954}', to: '\u{2f954}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f955}', to: '\u{2f955}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f956}', to: '\u{2f956}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f957}', to: '\u{2f957}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f958}', to: '\u{2f958}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f959}', to: '\u{2f959}', mapping: Mapped(StringTableSlice { byte_start_lo: 195, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: 
'\u{2f95a}', to: '\u{2f95a}', mapping: Mapped(StringTableSlice { byte_start_lo: 173, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f95b}', to: '\u{2f95b}', mapping: Mapped(StringTableSlice { byte_start_lo: 176, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f95c}', to: '\u{2f95c}', mapping: Mapped(StringTableSlice { byte_start_lo: 179, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f95d}', to: '\u{2f95e}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f95f}', to: '\u{2f95f}', mapping: Disallowed }, ++ Range { from: '\u{2f960}', to: '\u{2f960}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f961}', to: '\u{2f961}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f962}', to: '\u{2f962}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f963}', to: '\u{2f963}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f964}', to: '\u{2f964}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f965}', to: '\u{2f965}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f966}', to: '\u{2f966}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f967}', to: '\u{2f967}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f968}', to: '\u{2f968}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f969}', to: '\u{2f969}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 46, byte_len: 3 }) }, 
++ Range { from: '\u{2f96a}', to: '\u{2f96a}', mapping: Mapped(StringTableSlice { byte_start_lo: 219, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f96b}', to: '\u{2f96b}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f96c}', to: '\u{2f96c}', mapping: Mapped(StringTableSlice { byte_start_lo: 226, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f96d}', to: '\u{2f96d}', mapping: Mapped(StringTableSlice { byte_start_lo: 229, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f96e}', to: '\u{2f96e}', mapping: Mapped(StringTableSlice { byte_start_lo: 232, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f96f}', to: '\u{2f96f}', mapping: Mapped(StringTableSlice { byte_start_lo: 235, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f970}', to: '\u{2f970}', mapping: Mapped(StringTableSlice { byte_start_lo: 238, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f971}', to: '\u{2f971}', mapping: Mapped(StringTableSlice { byte_start_lo: 241, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f972}', to: '\u{2f972}', mapping: Mapped(StringTableSlice { byte_start_lo: 244, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f973}', to: '\u{2f973}', mapping: Mapped(StringTableSlice { byte_start_lo: 248, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f974}', to: '\u{2f974}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 46, byte_len: 3 }) }, ++ Range { from: '\u{2f975}', to: '\u{2f975}', mapping: Mapped(StringTableSlice { byte_start_lo: 255, byte_start_hi: 46, byte_len: 4 }) }, ++ Range { from: '\u{2f976}', to: '\u{2f976}', mapping: Mapped(StringTableSlice { byte_start_lo: 3, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f977}', to: '\u{2f977}', mapping: Mapped(StringTableSlice { byte_start_lo: 6, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f978}', to: '\u{2f978}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f979}', to: '\u{2f979}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f97a}', to: '\u{2f97a}', mapping: Mapped(StringTableSlice { byte_start_lo: 213, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f97b}', to: '\u{2f97b}', mapping: Mapped(StringTableSlice { byte_start_lo: 16, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f97c}', to: '\u{2f97c}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f97d}', to: '\u{2f97d}', mapping: Mapped(StringTableSlice { byte_start_lo: 24, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f97e}', to: '\u{2f97e}', mapping: Mapped(StringTableSlice { byte_start_lo: 27, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f97f}', to: '\u{2f97f}', mapping: Mapped(StringTableSlice { byte_start_lo: 31, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f980}', to: '\u{2f980}', mapping: Mapped(StringTableSlice { byte_start_lo: 34, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f981}', to: '\u{2f981}', mapping: Mapped(StringTableSlice { byte_start_lo: 38, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f982}', to: '\u{2f982}', mapping: Mapped(StringTableSlice { byte_start_lo: 41, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f983}', to: '\u{2f983}', mapping: Mapped(StringTableSlice { byte_start_lo: 44, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f984}', to: '\u{2f984}', mapping: Mapped(StringTableSlice { byte_start_lo: 47, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f985}', to: '\u{2f985}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f986}', to: '\u{2f986}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 47, 
byte_len: 3 }) }, ++ Range { from: '\u{2f987}', to: '\u{2f987}', mapping: Mapped(StringTableSlice { byte_start_lo: 56, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f988}', to: '\u{2f988}', mapping: Mapped(StringTableSlice { byte_start_lo: 60, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f989}', to: '\u{2f989}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f98a}', to: '\u{2f98a}', mapping: Mapped(StringTableSlice { byte_start_lo: 68, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f98b}', to: '\u{2f98b}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 44, byte_len: 3 }) }, ++ Range { from: '\u{2f98c}', to: '\u{2f98c}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f98d}', to: '\u{2f98d}', mapping: Mapped(StringTableSlice { byte_start_lo: 75, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f98e}', to: '\u{2f98e}', mapping: Mapped(StringTableSlice { byte_start_lo: 78, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f98f}', to: '\u{2f98f}', mapping: Mapped(StringTableSlice { byte_start_lo: 81, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f990}', to: '\u{2f990}', mapping: Mapped(StringTableSlice { byte_start_lo: 84, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f991}', to: '\u{2f991}', mapping: Mapped(StringTableSlice { byte_start_lo: 87, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f992}', to: '\u{2f992}', mapping: Mapped(StringTableSlice { byte_start_lo: 90, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f993}', to: '\u{2f993}', mapping: Mapped(StringTableSlice { byte_start_lo: 93, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f994}', to: '\u{2f994}', mapping: Mapped(StringTableSlice { byte_start_lo: 96, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f995}', to: '\u{2f995}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f996}', to: '\u{2f996}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f997}', to: '\u{2f997}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f998}', to: '\u{2f998}', mapping: Mapped(StringTableSlice { byte_start_lo: 119, byte_start_hi: 27, byte_len: 3 }) }, ++ Range { from: '\u{2f999}', to: '\u{2f999}', mapping: Mapped(StringTableSlice { byte_start_lo: 109, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f99a}', to: '\u{2f99a}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f99b}', to: '\u{2f99b}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f99c}', to: '\u{2f99c}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f99d}', to: '\u{2f99d}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f99e}', to: '\u{2f99e}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f99f}', to: '\u{2f99f}', mapping: Mapped(StringTableSlice { byte_start_lo: 222, byte_start_hi: 29, byte_len: 3 }) }, ++ Range { from: '\u{2f9a0}', to: '\u{2f9a0}', mapping: Mapped(StringTableSlice { byte_start_lo: 127, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9a1}', to: '\u{2f9a1}', mapping: Mapped(StringTableSlice { byte_start_lo: 130, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9a2}', to: '\u{2f9a2}', mapping: Mapped(StringTableSlice { byte_start_lo: 133, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9a3}', to: '\u{2f9a3}', mapping: Mapped(StringTableSlice { byte_start_lo: 136, 
byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9a4}', to: '\u{2f9a4}', mapping: Mapped(StringTableSlice { byte_start_lo: 139, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f9a5}', to: '\u{2f9a5}', mapping: Mapped(StringTableSlice { byte_start_lo: 143, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f9a6}', to: '\u{2f9a6}', mapping: Mapped(StringTableSlice { byte_start_lo: 147, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f9a7}', to: '\u{2f9a7}', mapping: Mapped(StringTableSlice { byte_start_lo: 151, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9a8}', to: '\u{2f9a8}', mapping: Mapped(StringTableSlice { byte_start_lo: 154, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9a9}', to: '\u{2f9a9}', mapping: Mapped(StringTableSlice { byte_start_lo: 157, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9aa}', to: '\u{2f9aa}', mapping: Mapped(StringTableSlice { byte_start_lo: 160, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9ab}', to: '\u{2f9ab}', mapping: Mapped(StringTableSlice { byte_start_lo: 163, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f9ac}', to: '\u{2f9ac}', mapping: Mapped(StringTableSlice { byte_start_lo: 167, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9ad}', to: '\u{2f9ad}', mapping: Mapped(StringTableSlice { byte_start_lo: 170, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f9ae}', to: '\u{2f9ae}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9af}', to: '\u{2f9af}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9b0}', to: '\u{2f9b0}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f9b1}', to: '\u{2f9b1}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: 
'\u{2f9b2}', to: '\u{2f9b2}', mapping: Mapped(StringTableSlice { byte_start_lo: 188, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9b3}', to: '\u{2f9b3}', mapping: Mapped(StringTableSlice { byte_start_lo: 191, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9b4}', to: '\u{2f9b4}', mapping: Mapped(StringTableSlice { byte_start_lo: 198, byte_start_hi: 26, byte_len: 3 }) }, ++ Range { from: '\u{2f9b5}', to: '\u{2f9b5}', mapping: Mapped(StringTableSlice { byte_start_lo: 194, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9b6}', to: '\u{2f9b6}', mapping: Mapped(StringTableSlice { byte_start_lo: 197, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9b7}', to: '\u{2f9b7}', mapping: Mapped(StringTableSlice { byte_start_lo: 200, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9b8}', to: '\u{2f9b8}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9b9}', to: '\u{2f9b9}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9ba}', to: '\u{2f9ba}', mapping: Mapped(StringTableSlice { byte_start_lo: 209, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9bb}', to: '\u{2f9bb}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f9bc}', to: '\u{2f9bc}', mapping: Mapped(StringTableSlice { byte_start_lo: 212, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9bd}', to: '\u{2f9bd}', mapping: Mapped(StringTableSlice { byte_start_lo: 215, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9be}', to: '\u{2f9be}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9bf}', to: '\u{2f9bf}', mapping: Disallowed }, ++ Range { from: '\u{2f9c0}', to: '\u{2f9c0}', mapping: Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 47, byte_len: 3 }) }, 
++ Range { from: '\u{2f9c1}', to: '\u{2f9c1}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9c2}', to: '\u{2f9c2}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9c3}', to: '\u{2f9c3}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9c4}', to: '\u{2f9c4}', mapping: Mapped(StringTableSlice { byte_start_lo: 51, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f9c5}', to: '\u{2f9c5}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f9c6}', to: '\u{2f9c6}', mapping: Mapped(StringTableSlice { byte_start_lo: 237, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9c7}', to: '\u{2f9c7}', mapping: Mapped(StringTableSlice { byte_start_lo: 240, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9c8}', to: '\u{2f9c8}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9c9}', to: '\u{2f9c9}', mapping: Mapped(StringTableSlice { byte_start_lo: 246, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9ca}', to: '\u{2f9ca}', mapping: Mapped(StringTableSlice { byte_start_lo: 249, byte_start_hi: 47, byte_len: 3 }) }, ++ Range { from: '\u{2f9cb}', to: '\u{2f9cb}', mapping: Mapped(StringTableSlice { byte_start_lo: 252, byte_start_hi: 47, byte_len: 4 }) }, ++ Range { from: '\u{2f9cc}', to: '\u{2f9cc}', mapping: Mapped(StringTableSlice { byte_start_lo: 0, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9cd}', to: '\u{2f9cd}', mapping: Mapped(StringTableSlice { byte_start_lo: 4, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9ce}', to: '\u{2f9ce}', mapping: Mapped(StringTableSlice { byte_start_lo: 7, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9cf}', to: '\u{2f9cf}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 10, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9d0}', to: '\u{2f9d0}', mapping: Mapped(StringTableSlice { byte_start_lo: 183, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f9d1}', to: '\u{2f9d1}', mapping: Mapped(StringTableSlice { byte_start_lo: 186, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f9d2}', to: '\u{2f9d2}', mapping: Mapped(StringTableSlice { byte_start_lo: 72, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2f9d3}', to: '\u{2f9d3}', mapping: Mapped(StringTableSlice { byte_start_lo: 13, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9d4}', to: '\u{2f9d4}', mapping: Mapped(StringTableSlice { byte_start_lo: 17, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9d5}', to: '\u{2f9d5}', mapping: Mapped(StringTableSlice { byte_start_lo: 20, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9d6}', to: '\u{2f9d6}', mapping: Mapped(StringTableSlice { byte_start_lo: 23, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9d7}', to: '\u{2f9d7}', mapping: Mapped(StringTableSlice { byte_start_lo: 26, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9d8}', to: '\u{2f9d8}', mapping: Mapped(StringTableSlice { byte_start_lo: 29, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9d9}', to: '\u{2f9d9}', mapping: Mapped(StringTableSlice { byte_start_lo: 33, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9da}', to: '\u{2f9da}', mapping: Mapped(StringTableSlice { byte_start_lo: 37, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9db}', to: '\u{2f9db}', mapping: Mapped(StringTableSlice { byte_start_lo: 40, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9dc}', to: '\u{2f9dc}', mapping: Mapped(StringTableSlice { byte_start_lo: 43, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9dd}', to: '\u{2f9dd}', mapping: Mapped(StringTableSlice { byte_start_lo: 46, byte_start_hi: 48, 
byte_len: 4 }) }, ++ Range { from: '\u{2f9de}', to: '\u{2f9de}', mapping: Mapped(StringTableSlice { byte_start_lo: 50, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9df}', to: '\u{2f9df}', mapping: Mapped(StringTableSlice { byte_start_lo: 189, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2f9e0}', to: '\u{2f9e0}', mapping: Mapped(StringTableSlice { byte_start_lo: 53, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9e1}', to: '\u{2f9e1}', mapping: Mapped(StringTableSlice { byte_start_lo: 57, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9e2}', to: '\u{2f9e2}', mapping: Mapped(StringTableSlice { byte_start_lo: 61, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9e3}', to: '\u{2f9e3}', mapping: Mapped(StringTableSlice { byte_start_lo: 64, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9e4}', to: '\u{2f9e4}', mapping: Mapped(StringTableSlice { byte_start_lo: 67, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9e5}', to: '\u{2f9e5}', mapping: Mapped(StringTableSlice { byte_start_lo: 70, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9e6}', to: '\u{2f9e6}', mapping: Mapped(StringTableSlice { byte_start_lo: 74, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9e7}', to: '\u{2f9e7}', mapping: Mapped(StringTableSlice { byte_start_lo: 77, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9e8}', to: '\u{2f9e8}', mapping: Mapped(StringTableSlice { byte_start_lo: 80, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9e9}', to: '\u{2f9e9}', mapping: Mapped(StringTableSlice { byte_start_lo: 83, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9ea}', to: '\u{2f9ea}', mapping: Mapped(StringTableSlice { byte_start_lo: 86, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9eb}', to: '\u{2f9eb}', mapping: Mapped(StringTableSlice { byte_start_lo: 89, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9ec}', to: '\u{2f9ec}', 
mapping: Mapped(StringTableSlice { byte_start_lo: 92, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9ed}', to: '\u{2f9ed}', mapping: Mapped(StringTableSlice { byte_start_lo: 95, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9ee}', to: '\u{2f9ee}', mapping: Mapped(StringTableSlice { byte_start_lo: 99, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9ef}', to: '\u{2f9ef}', mapping: Mapped(StringTableSlice { byte_start_lo: 102, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9f0}', to: '\u{2f9f0}', mapping: Mapped(StringTableSlice { byte_start_lo: 105, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9f1}', to: '\u{2f9f1}', mapping: Mapped(StringTableSlice { byte_start_lo: 108, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9f2}', to: '\u{2f9f2}', mapping: Mapped(StringTableSlice { byte_start_lo: 112, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9f3}', to: '\u{2f9f3}', mapping: Mapped(StringTableSlice { byte_start_lo: 115, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9f4}', to: '\u{2f9f4}', mapping: Mapped(StringTableSlice { byte_start_lo: 118, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9f5}', to: '\u{2f9f5}', mapping: Mapped(StringTableSlice { byte_start_lo: 121, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9f6}', to: '\u{2f9f6}', mapping: Mapped(StringTableSlice { byte_start_lo: 124, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9f7}', to: '\u{2f9f7}', mapping: Mapped(StringTableSlice { byte_start_lo: 128, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9f8}', to: '\u{2f9f8}', mapping: Mapped(StringTableSlice { byte_start_lo: 132, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9f9}', to: '\u{2f9f9}', mapping: Mapped(StringTableSlice { byte_start_lo: 135, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9fa}', to: '\u{2f9fa}', mapping: Mapped(StringTableSlice { byte_start_lo: 138, 
byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9fb}', to: '\u{2f9fb}', mapping: Mapped(StringTableSlice { byte_start_lo: 141, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9fc}', to: '\u{2f9fc}', mapping: Mapped(StringTableSlice { byte_start_lo: 145, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2f9fd}', to: '\u{2f9fd}', mapping: Mapped(StringTableSlice { byte_start_lo: 148, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2f9fe}', to: '\u{2f9ff}', mapping: Mapped(StringTableSlice { byte_start_lo: 207, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2fa00}', to: '\u{2fa00}', mapping: Mapped(StringTableSlice { byte_start_lo: 152, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa01}', to: '\u{2fa01}', mapping: Mapped(StringTableSlice { byte_start_lo: 155, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2fa02}', to: '\u{2fa02}', mapping: Mapped(StringTableSlice { byte_start_lo: 159, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa03}', to: '\u{2fa03}', mapping: Mapped(StringTableSlice { byte_start_lo: 162, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa04}', to: '\u{2fa04}', mapping: Mapped(StringTableSlice { byte_start_lo: 165, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa05}', to: '\u{2fa05}', mapping: Mapped(StringTableSlice { byte_start_lo: 168, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa06}', to: '\u{2fa06}', mapping: Mapped(StringTableSlice { byte_start_lo: 171, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa07}', to: '\u{2fa07}', mapping: Mapped(StringTableSlice { byte_start_lo: 174, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa08}', to: '\u{2fa08}', mapping: Mapped(StringTableSlice { byte_start_lo: 177, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa09}', to: '\u{2fa09}', mapping: Mapped(StringTableSlice { byte_start_lo: 180, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: 
'\u{2fa0a}', to: '\u{2fa0a}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 30, byte_len: 3 }) }, ++ Range { from: '\u{2fa0b}', to: '\u{2fa0b}', mapping: Mapped(StringTableSlice { byte_start_lo: 184, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa0c}', to: '\u{2fa0c}', mapping: Mapped(StringTableSlice { byte_start_lo: 187, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa0d}', to: '\u{2fa0d}', mapping: Mapped(StringTableSlice { byte_start_lo: 190, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa0e}', to: '\u{2fa0e}', mapping: Mapped(StringTableSlice { byte_start_lo: 193, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa0f}', to: '\u{2fa0f}', mapping: Mapped(StringTableSlice { byte_start_lo: 196, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa10}', to: '\u{2fa10}', mapping: Mapped(StringTableSlice { byte_start_lo: 199, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2fa11}', to: '\u{2fa11}', mapping: Mapped(StringTableSlice { byte_start_lo: 203, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa12}', to: '\u{2fa12}', mapping: Mapped(StringTableSlice { byte_start_lo: 206, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2fa13}', to: '\u{2fa13}', mapping: Mapped(StringTableSlice { byte_start_lo: 210, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2fa14}', to: '\u{2fa14}', mapping: Mapped(StringTableSlice { byte_start_lo: 214, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2fa15}', to: '\u{2fa15}', mapping: Mapped(StringTableSlice { byte_start_lo: 216, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa16}', to: '\u{2fa16}', mapping: Mapped(StringTableSlice { byte_start_lo: 218, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa17}', to: '\u{2fa17}', mapping: Mapped(StringTableSlice { byte_start_lo: 228, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa18}', to: '\u{2fa18}', mapping: 
Mapped(StringTableSlice { byte_start_lo: 221, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa19}', to: '\u{2fa19}', mapping: Mapped(StringTableSlice { byte_start_lo: 224, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa1a}', to: '\u{2fa1a}', mapping: Mapped(StringTableSlice { byte_start_lo: 227, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa1b}', to: '\u{2fa1b}', mapping: Mapped(StringTableSlice { byte_start_lo: 230, byte_start_hi: 48, byte_len: 3 }) }, ++ Range { from: '\u{2fa1c}', to: '\u{2fa1c}', mapping: Mapped(StringTableSlice { byte_start_lo: 243, byte_start_hi: 13, byte_len: 3 }) }, ++ Range { from: '\u{2fa1d}', to: '\u{2fa1d}', mapping: Mapped(StringTableSlice { byte_start_lo: 233, byte_start_hi: 48, byte_len: 4 }) }, ++ Range { from: '\u{2fa1e}', to: '\u{e00ff}', mapping: Disallowed }, ++ Range { from: '\u{e0100}', to: '\u{e01ef}', mapping: Ignored }, ++ Range { from: '\u{e01f0}', to: '\u{10ffff}', mapping: Disallowed }, ++]; ++ ++static STRING_TABLE: &'static str = "\u{61}\ ++ \u{62}\ ++ \u{63}\ ++ \u{64}\ ++ \u{65}\ ++ \u{66}\ ++ \u{67}\ ++ \u{68}\ ++ \u{69}\ ++ \u{6a}\ ++ \u{6b}\ ++ \u{6c}\ ++ \u{6d}\ ++ \u{6e}\ ++ \u{6f}\ ++ \u{70}\ ++ \u{71}\ ++ \u{72}\ ++ \u{73}\ ++ \u{74}\ ++ \u{75}\ ++ \u{76}\ ++ \u{77}\ ++ \u{78}\ ++ \u{79}\ ++ \u{7a}\ ++ \u{20}\ ++ \u{20}\ ++ \u{308}\ ++ \u{20}\ ++ \u{304}\ ++ \u{32}\ ++ \u{33}\ ++ \u{20}\ ++ \u{301}\ ++ \u{3bc}\ ++ \u{20}\ ++ \u{327}\ ++ \u{31}\ ++ \u{31}\ ++ \u{2044}\ ++ \u{34}\ ++ \u{31}\ ++ \u{2044}\ ++ \u{32}\ ++ \u{33}\ ++ \u{2044}\ ++ \u{34}\ ++ \u{e0}\ ++ \u{e1}\ ++ \u{e2}\ ++ \u{e3}\ ++ \u{e4}\ ++ \u{e5}\ ++ \u{e6}\ ++ \u{e7}\ ++ \u{e8}\ ++ \u{e9}\ ++ \u{ea}\ ++ \u{eb}\ ++ \u{ec}\ ++ \u{ed}\ ++ \u{ee}\ ++ \u{ef}\ ++ \u{f0}\ ++ \u{f1}\ ++ \u{f2}\ ++ \u{f3}\ ++ \u{f4}\ ++ \u{f5}\ ++ \u{f6}\ ++ \u{f8}\ ++ \u{f9}\ ++ \u{fa}\ ++ \u{fb}\ ++ \u{fc}\ ++ \u{fd}\ ++ \u{fe}\ ++ \u{73}\ ++ \u{73}\ ++ \u{101}\ ++ \u{103}\ ++ \u{105}\ ++ \u{107}\ ++ \u{109}\ ++ \u{10b}\ ++ 
\u{10d}\ ++ \u{10f}\ ++ \u{111}\ ++ \u{113}\ ++ \u{115}\ ++ \u{117}\ ++ \u{119}\ ++ \u{11b}\ ++ \u{11d}\ ++ \u{11f}\ ++ \u{121}\ ++ \u{123}\ ++ \u{125}\ ++ \u{127}\ ++ \u{129}\ ++ \u{12b}\ ++ \u{12d}\ ++ \u{12f}\ ++ \u{69}\ ++ \u{307}\ ++ \u{69}\ ++ \u{6a}\ ++ \u{135}\ ++ \u{137}\ ++ \u{13a}\ ++ \u{13c}\ ++ \u{13e}\ ++ \u{6c}\ ++ \u{b7}\ ++ \u{142}\ ++ \u{144}\ ++ \u{146}\ ++ \u{148}\ ++ \u{2bc}\ ++ \u{6e}\ ++ \u{14b}\ ++ \u{14d}\ ++ \u{14f}\ ++ \u{151}\ ++ \u{153}\ ++ \u{155}\ ++ \u{157}\ ++ \u{159}\ ++ \u{15b}\ ++ \u{15d}\ ++ \u{15f}\ ++ \u{161}\ ++ \u{163}\ ++ \u{165}\ ++ \u{167}\ ++ \u{169}\ ++ \u{16b}\ ++ \u{16d}\ ++ \u{16f}\ ++ \u{171}\ ++ \u{173}\ ++ \u{175}\ ++ \u{177}\ ++ \u{ff}\ ++ \u{17a}\ ++ \u{17c}\ ++ \u{17e}\ ++ \u{253}\ ++ \u{183}\ ++ \u{185}\ ++ \u{254}\ ++ \u{188}\ ++ \u{256}\ ++ \u{257}\ ++ \u{18c}\ ++ \u{1dd}\ ++ \u{259}\ ++ \u{25b}\ ++ \u{192}\ ++ \u{260}\ ++ \u{263}\ ++ \u{269}\ ++ \u{268}\ ++ \u{199}\ ++ \u{26f}\ ++ \u{272}\ ++ \u{275}\ ++ \u{1a1}\ ++ \u{1a3}\ ++ \u{1a5}\ ++ \u{280}\ ++ \u{1a8}\ ++ \u{283}\ ++ \u{1ad}\ ++ \u{288}\ ++ \u{1b0}\ ++ \u{28a}\ ++ \u{28b}\ ++ \u{1b4}\ ++ \u{1b6}\ ++ \u{292}\ ++ \u{1b9}\ ++ \u{1bd}\ ++ \u{64}\ ++ \u{17e}\ ++ \u{6c}\ ++ \u{6a}\ ++ \u{6e}\ ++ \u{6a}\ ++ \u{1ce}\ ++ \u{1d0}\ ++ \u{1d2}\ ++ \u{1d4}\ ++ \u{1d6}\ ++ \u{1d8}\ ++ \u{1da}\ ++ \u{1dc}\ ++ \u{1df}\ ++ \u{1e1}\ ++ \u{1e3}\ ++ \u{1e5}\ ++ \u{1e7}\ ++ \u{1e9}\ ++ \u{1eb}\ ++ \u{1ed}\ ++ \u{1ef}\ ++ \u{64}\ ++ \u{7a}\ ++ \u{1f5}\ ++ \u{195}\ ++ \u{1bf}\ ++ \u{1f9}\ ++ \u{1fb}\ ++ \u{1fd}\ ++ \u{1ff}\ ++ \u{201}\ ++ \u{203}\ ++ \u{205}\ ++ \u{207}\ ++ \u{209}\ ++ \u{20b}\ ++ \u{20d}\ ++ \u{20f}\ ++ \u{211}\ ++ \u{213}\ ++ \u{215}\ ++ \u{217}\ ++ \u{219}\ ++ \u{21b}\ ++ \u{21d}\ ++ \u{21f}\ ++ \u{19e}\ ++ \u{223}\ ++ \u{225}\ ++ \u{227}\ ++ \u{229}\ ++ \u{22b}\ ++ \u{22d}\ ++ \u{22f}\ ++ \u{231}\ ++ \u{233}\ ++ \u{2c65}\ ++ \u{23c}\ ++ \u{19a}\ ++ \u{2c66}\ ++ \u{242}\ ++ \u{180}\ ++ \u{289}\ ++ \u{28c}\ ++ \u{247}\ ++ \u{249}\ ++ \u{24b}\ ++ 
\u{24d}\ ++ \u{24f}\ ++ \u{266}\ ++ \u{279}\ ++ \u{27b}\ ++ \u{281}\ ++ \u{20}\ ++ \u{306}\ ++ \u{20}\ ++ \u{307}\ ++ \u{20}\ ++ \u{30a}\ ++ \u{20}\ ++ \u{328}\ ++ \u{20}\ ++ \u{303}\ ++ \u{20}\ ++ \u{30b}\ ++ \u{295}\ ++ \u{300}\ ++ \u{301}\ ++ \u{313}\ ++ \u{308}\ ++ \u{301}\ ++ \u{3b9}\ ++ \u{371}\ ++ \u{373}\ ++ \u{2b9}\ ++ \u{377}\ ++ \u{20}\ ++ \u{3b9}\ ++ \u{3b}\ ++ \u{3f3}\ ++ \u{20}\ ++ \u{308}\ ++ \u{301}\ ++ \u{3ac}\ ++ \u{b7}\ ++ \u{3ad}\ ++ \u{3ae}\ ++ \u{3af}\ ++ \u{3cc}\ ++ \u{3cd}\ ++ \u{3ce}\ ++ \u{3b1}\ ++ \u{3b2}\ ++ \u{3b3}\ ++ \u{3b4}\ ++ \u{3b5}\ ++ \u{3b6}\ ++ \u{3b7}\ ++ \u{3b8}\ ++ \u{3ba}\ ++ \u{3bb}\ ++ \u{3bd}\ ++ \u{3be}\ ++ \u{3bf}\ ++ \u{3c0}\ ++ \u{3c1}\ ++ \u{3c3}\ ++ \u{3c4}\ ++ \u{3c5}\ ++ \u{3c6}\ ++ \u{3c7}\ ++ \u{3c8}\ ++ \u{3c9}\ ++ \u{3ca}\ ++ \u{3cb}\ ++ \u{3d7}\ ++ \u{3d9}\ ++ \u{3db}\ ++ \u{3dd}\ ++ \u{3df}\ ++ \u{3e1}\ ++ \u{3e3}\ ++ \u{3e5}\ ++ \u{3e7}\ ++ \u{3e9}\ ++ \u{3eb}\ ++ \u{3ed}\ ++ \u{3ef}\ ++ \u{3f8}\ ++ \u{3fb}\ ++ \u{37b}\ ++ \u{37c}\ ++ \u{37d}\ ++ \u{450}\ ++ \u{451}\ ++ \u{452}\ ++ \u{453}\ ++ \u{454}\ ++ \u{455}\ ++ \u{456}\ ++ \u{457}\ ++ \u{458}\ ++ \u{459}\ ++ \u{45a}\ ++ \u{45b}\ ++ \u{45c}\ ++ \u{45d}\ ++ \u{45e}\ ++ \u{45f}\ ++ \u{430}\ ++ \u{431}\ ++ \u{432}\ ++ \u{433}\ ++ \u{434}\ ++ \u{435}\ ++ \u{436}\ ++ \u{437}\ ++ \u{438}\ ++ \u{439}\ ++ \u{43a}\ ++ \u{43b}\ ++ \u{43c}\ ++ \u{43d}\ ++ \u{43e}\ ++ \u{43f}\ ++ \u{440}\ ++ \u{441}\ ++ \u{442}\ ++ \u{443}\ ++ \u{444}\ ++ \u{445}\ ++ \u{446}\ ++ \u{447}\ ++ \u{448}\ ++ \u{449}\ ++ \u{44a}\ ++ \u{44b}\ ++ \u{44c}\ ++ \u{44d}\ ++ \u{44e}\ ++ \u{44f}\ ++ \u{461}\ ++ \u{463}\ ++ \u{465}\ ++ \u{467}\ ++ \u{469}\ ++ \u{46b}\ ++ \u{46d}\ ++ \u{46f}\ ++ \u{471}\ ++ \u{473}\ ++ \u{475}\ ++ \u{477}\ ++ \u{479}\ ++ \u{47b}\ ++ \u{47d}\ ++ \u{47f}\ ++ \u{481}\ ++ \u{48b}\ ++ \u{48d}\ ++ \u{48f}\ ++ \u{491}\ ++ \u{493}\ ++ \u{495}\ ++ \u{497}\ ++ \u{499}\ ++ \u{49b}\ ++ \u{49d}\ ++ \u{49f}\ ++ \u{4a1}\ ++ \u{4a3}\ ++ \u{4a5}\ ++ \u{4a7}\ ++ \u{4a9}\ ++ 
\u{4ab}\ ++ \u{4ad}\ ++ \u{4af}\ ++ \u{4b1}\ ++ \u{4b3}\ ++ \u{4b5}\ ++ \u{4b7}\ ++ \u{4b9}\ ++ \u{4bb}\ ++ \u{4bd}\ ++ \u{4bf}\ ++ \u{4c2}\ ++ \u{4c4}\ ++ \u{4c6}\ ++ \u{4c8}\ ++ \u{4ca}\ ++ \u{4cc}\ ++ \u{4ce}\ ++ \u{4d1}\ ++ \u{4d3}\ ++ \u{4d5}\ ++ \u{4d7}\ ++ \u{4d9}\ ++ \u{4db}\ ++ \u{4dd}\ ++ \u{4df}\ ++ \u{4e1}\ ++ \u{4e3}\ ++ \u{4e5}\ ++ \u{4e7}\ ++ \u{4e9}\ ++ \u{4eb}\ ++ \u{4ed}\ ++ \u{4ef}\ ++ \u{4f1}\ ++ \u{4f3}\ ++ \u{4f5}\ ++ \u{4f7}\ ++ \u{4f9}\ ++ \u{4fb}\ ++ \u{4fd}\ ++ \u{4ff}\ ++ \u{501}\ ++ \u{503}\ ++ \u{505}\ ++ \u{507}\ ++ \u{509}\ ++ \u{50b}\ ++ \u{50d}\ ++ \u{50f}\ ++ \u{511}\ ++ \u{513}\ ++ \u{515}\ ++ \u{517}\ ++ \u{519}\ ++ \u{51b}\ ++ \u{51d}\ ++ \u{51f}\ ++ \u{521}\ ++ \u{523}\ ++ \u{525}\ ++ \u{527}\ ++ \u{529}\ ++ \u{52b}\ ++ \u{52d}\ ++ \u{52f}\ ++ \u{561}\ ++ \u{562}\ ++ \u{563}\ ++ \u{564}\ ++ \u{565}\ ++ \u{566}\ ++ \u{567}\ ++ \u{568}\ ++ \u{569}\ ++ \u{56a}\ ++ \u{56b}\ ++ \u{56c}\ ++ \u{56d}\ ++ \u{56e}\ ++ \u{56f}\ ++ \u{570}\ ++ \u{571}\ ++ \u{572}\ ++ \u{573}\ ++ \u{574}\ ++ \u{575}\ ++ \u{576}\ ++ \u{577}\ ++ \u{578}\ ++ \u{579}\ ++ \u{57a}\ ++ \u{57b}\ ++ \u{57c}\ ++ \u{57d}\ ++ \u{57e}\ ++ \u{57f}\ ++ \u{580}\ ++ \u{581}\ ++ \u{582}\ ++ \u{583}\ ++ \u{584}\ ++ \u{585}\ ++ \u{586}\ ++ \u{565}\ ++ \u{582}\ ++ \u{627}\ ++ \u{674}\ ++ \u{648}\ ++ \u{674}\ ++ \u{6c7}\ ++ \u{674}\ ++ \u{64a}\ ++ \u{674}\ ++ \u{915}\ ++ \u{93c}\ ++ \u{916}\ ++ \u{93c}\ ++ \u{917}\ ++ \u{93c}\ ++ \u{91c}\ ++ \u{93c}\ ++ \u{921}\ ++ \u{93c}\ ++ \u{922}\ ++ \u{93c}\ ++ \u{92b}\ ++ \u{93c}\ ++ \u{92f}\ ++ \u{93c}\ ++ \u{9a1}\ ++ \u{9bc}\ ++ \u{9a2}\ ++ \u{9bc}\ ++ \u{9af}\ ++ \u{9bc}\ ++ \u{a32}\ ++ \u{a3c}\ ++ \u{a38}\ ++ \u{a3c}\ ++ \u{a16}\ ++ \u{a3c}\ ++ \u{a17}\ ++ \u{a3c}\ ++ \u{a1c}\ ++ \u{a3c}\ ++ \u{a2b}\ ++ \u{a3c}\ ++ \u{b21}\ ++ \u{b3c}\ ++ \u{b22}\ ++ \u{b3c}\ ++ \u{e4d}\ ++ \u{e32}\ ++ \u{ecd}\ ++ \u{eb2}\ ++ \u{eab}\ ++ \u{e99}\ ++ \u{eab}\ ++ \u{ea1}\ ++ \u{f0b}\ ++ \u{f42}\ ++ \u{fb7}\ ++ \u{f4c}\ ++ \u{fb7}\ ++ \u{f51}\ ++ 
\u{fb7}\ ++ \u{f56}\ ++ \u{fb7}\ ++ \u{f5b}\ ++ \u{fb7}\ ++ \u{f40}\ ++ \u{fb5}\ ++ \u{f71}\ ++ \u{f72}\ ++ \u{f71}\ ++ \u{f74}\ ++ \u{fb2}\ ++ \u{f80}\ ++ \u{fb2}\ ++ \u{f71}\ ++ \u{f80}\ ++ \u{fb3}\ ++ \u{f80}\ ++ \u{fb3}\ ++ \u{f71}\ ++ \u{f80}\ ++ \u{f71}\ ++ \u{f80}\ ++ \u{f92}\ ++ \u{fb7}\ ++ \u{f9c}\ ++ \u{fb7}\ ++ \u{fa1}\ ++ \u{fb7}\ ++ \u{fa6}\ ++ \u{fb7}\ ++ \u{fab}\ ++ \u{fb7}\ ++ \u{f90}\ ++ \u{fb5}\ ++ \u{2d27}\ ++ \u{2d2d}\ ++ \u{10dc}\ ++ \u{13f0}\ ++ \u{13f1}\ ++ \u{13f2}\ ++ \u{13f3}\ ++ \u{13f4}\ ++ \u{13f5}\ ++ \u{a64b}\ ++ \u{250}\ ++ \u{251}\ ++ \u{1d02}\ ++ \u{25c}\ ++ \u{1d16}\ ++ \u{1d17}\ ++ \u{1d1d}\ ++ \u{1d25}\ ++ \u{252}\ ++ \u{255}\ ++ \u{25f}\ ++ \u{261}\ ++ \u{265}\ ++ \u{26a}\ ++ \u{1d7b}\ ++ \u{29d}\ ++ \u{26d}\ ++ \u{1d85}\ ++ \u{29f}\ ++ \u{271}\ ++ \u{270}\ ++ \u{273}\ ++ \u{274}\ ++ \u{278}\ ++ \u{282}\ ++ \u{1ab}\ ++ \u{1d1c}\ ++ \u{290}\ ++ \u{291}\ ++ \u{1e01}\ ++ \u{1e03}\ ++ \u{1e05}\ ++ \u{1e07}\ ++ \u{1e09}\ ++ \u{1e0b}\ ++ \u{1e0d}\ ++ \u{1e0f}\ ++ \u{1e11}\ ++ \u{1e13}\ ++ \u{1e15}\ ++ \u{1e17}\ ++ \u{1e19}\ ++ \u{1e1b}\ ++ \u{1e1d}\ ++ \u{1e1f}\ ++ \u{1e21}\ ++ \u{1e23}\ ++ \u{1e25}\ ++ \u{1e27}\ ++ \u{1e29}\ ++ \u{1e2b}\ ++ \u{1e2d}\ ++ \u{1e2f}\ ++ \u{1e31}\ ++ \u{1e33}\ ++ \u{1e35}\ ++ \u{1e37}\ ++ \u{1e39}\ ++ \u{1e3b}\ ++ \u{1e3d}\ ++ \u{1e3f}\ ++ \u{1e41}\ ++ \u{1e43}\ ++ \u{1e45}\ ++ \u{1e47}\ ++ \u{1e49}\ ++ \u{1e4b}\ ++ \u{1e4d}\ ++ \u{1e4f}\ ++ \u{1e51}\ ++ \u{1e53}\ ++ \u{1e55}\ ++ \u{1e57}\ ++ \u{1e59}\ ++ \u{1e5b}\ ++ \u{1e5d}\ ++ \u{1e5f}\ ++ \u{1e61}\ ++ \u{1e63}\ ++ \u{1e65}\ ++ \u{1e67}\ ++ \u{1e69}\ ++ \u{1e6b}\ ++ \u{1e6d}\ ++ \u{1e6f}\ ++ \u{1e71}\ ++ \u{1e73}\ ++ \u{1e75}\ ++ \u{1e77}\ ++ \u{1e79}\ ++ \u{1e7b}\ ++ \u{1e7d}\ ++ \u{1e7f}\ ++ \u{1e81}\ ++ \u{1e83}\ ++ \u{1e85}\ ++ \u{1e87}\ ++ \u{1e89}\ ++ \u{1e8b}\ ++ \u{1e8d}\ ++ \u{1e8f}\ ++ \u{1e91}\ ++ \u{1e93}\ ++ \u{1e95}\ ++ \u{61}\ ++ \u{2be}\ ++ \u{1ea1}\ ++ \u{1ea3}\ ++ \u{1ea5}\ ++ \u{1ea7}\ ++ \u{1ea9}\ ++ \u{1eab}\ ++ \u{1ead}\ ++ 
\u{1eaf}\ ++ \u{1eb1}\ ++ \u{1eb3}\ ++ \u{1eb5}\ ++ \u{1eb7}\ ++ \u{1eb9}\ ++ \u{1ebb}\ ++ \u{1ebd}\ ++ \u{1ebf}\ ++ \u{1ec1}\ ++ \u{1ec3}\ ++ \u{1ec5}\ ++ \u{1ec7}\ ++ \u{1ec9}\ ++ \u{1ecb}\ ++ \u{1ecd}\ ++ \u{1ecf}\ ++ \u{1ed1}\ ++ \u{1ed3}\ ++ \u{1ed5}\ ++ \u{1ed7}\ ++ \u{1ed9}\ ++ \u{1edb}\ ++ \u{1edd}\ ++ \u{1edf}\ ++ \u{1ee1}\ ++ \u{1ee3}\ ++ \u{1ee5}\ ++ \u{1ee7}\ ++ \u{1ee9}\ ++ \u{1eeb}\ ++ \u{1eed}\ ++ \u{1eef}\ ++ \u{1ef1}\ ++ \u{1ef3}\ ++ \u{1ef5}\ ++ \u{1ef7}\ ++ \u{1ef9}\ ++ \u{1efb}\ ++ \u{1efd}\ ++ \u{1eff}\ ++ \u{1f00}\ ++ \u{1f01}\ ++ \u{1f02}\ ++ \u{1f03}\ ++ \u{1f04}\ ++ \u{1f05}\ ++ \u{1f06}\ ++ \u{1f07}\ ++ \u{1f10}\ ++ \u{1f11}\ ++ \u{1f12}\ ++ \u{1f13}\ ++ \u{1f14}\ ++ \u{1f15}\ ++ \u{1f20}\ ++ \u{1f21}\ ++ \u{1f22}\ ++ \u{1f23}\ ++ \u{1f24}\ ++ \u{1f25}\ ++ \u{1f26}\ ++ \u{1f27}\ ++ \u{1f30}\ ++ \u{1f31}\ ++ \u{1f32}\ ++ \u{1f33}\ ++ \u{1f34}\ ++ \u{1f35}\ ++ \u{1f36}\ ++ \u{1f37}\ ++ \u{1f40}\ ++ \u{1f41}\ ++ \u{1f42}\ ++ \u{1f43}\ ++ \u{1f44}\ ++ \u{1f45}\ ++ \u{1f51}\ ++ \u{1f53}\ ++ \u{1f55}\ ++ \u{1f57}\ ++ \u{1f60}\ ++ \u{1f61}\ ++ \u{1f62}\ ++ \u{1f63}\ ++ \u{1f64}\ ++ \u{1f65}\ ++ \u{1f66}\ ++ \u{1f67}\ ++ \u{1f00}\ ++ \u{3b9}\ ++ \u{1f01}\ ++ \u{3b9}\ ++ \u{1f02}\ ++ \u{3b9}\ ++ \u{1f03}\ ++ \u{3b9}\ ++ \u{1f04}\ ++ \u{3b9}\ ++ \u{1f05}\ ++ \u{3b9}\ ++ \u{1f06}\ ++ \u{3b9}\ ++ \u{1f07}\ ++ \u{3b9}\ ++ \u{1f20}\ ++ \u{3b9}\ ++ \u{1f21}\ ++ \u{3b9}\ ++ \u{1f22}\ ++ \u{3b9}\ ++ \u{1f23}\ ++ \u{3b9}\ ++ \u{1f24}\ ++ \u{3b9}\ ++ \u{1f25}\ ++ \u{3b9}\ ++ \u{1f26}\ ++ \u{3b9}\ ++ \u{1f27}\ ++ \u{3b9}\ ++ \u{1f60}\ ++ \u{3b9}\ ++ \u{1f61}\ ++ \u{3b9}\ ++ \u{1f62}\ ++ \u{3b9}\ ++ \u{1f63}\ ++ \u{3b9}\ ++ \u{1f64}\ ++ \u{3b9}\ ++ \u{1f65}\ ++ \u{3b9}\ ++ \u{1f66}\ ++ \u{3b9}\ ++ \u{1f67}\ ++ \u{3b9}\ ++ \u{1f70}\ ++ \u{3b9}\ ++ \u{3b1}\ ++ \u{3b9}\ ++ \u{3ac}\ ++ \u{3b9}\ ++ \u{1fb6}\ ++ \u{3b9}\ ++ \u{1fb0}\ ++ \u{1fb1}\ ++ \u{1f70}\ ++ \u{20}\ ++ \u{313}\ ++ \u{20}\ ++ \u{342}\ ++ \u{20}\ ++ \u{308}\ ++ \u{342}\ ++ \u{1f74}\ ++ \u{3b9}\ ++ 
\u{3b7}\ ++ \u{3b9}\ ++ \u{3ae}\ ++ \u{3b9}\ ++ \u{1fc6}\ ++ \u{3b9}\ ++ \u{1f72}\ ++ \u{1f74}\ ++ \u{20}\ ++ \u{313}\ ++ \u{300}\ ++ \u{20}\ ++ \u{313}\ ++ \u{301}\ ++ \u{20}\ ++ \u{313}\ ++ \u{342}\ ++ \u{390}\ ++ \u{1fd0}\ ++ \u{1fd1}\ ++ \u{1f76}\ ++ \u{20}\ ++ \u{314}\ ++ \u{300}\ ++ \u{20}\ ++ \u{314}\ ++ \u{301}\ ++ \u{20}\ ++ \u{314}\ ++ \u{342}\ ++ \u{3b0}\ ++ \u{1fe0}\ ++ \u{1fe1}\ ++ \u{1f7a}\ ++ \u{1fe5}\ ++ \u{20}\ ++ \u{308}\ ++ \u{300}\ ++ \u{60}\ ++ \u{1f7c}\ ++ \u{3b9}\ ++ \u{3c9}\ ++ \u{3b9}\ ++ \u{3ce}\ ++ \u{3b9}\ ++ \u{1ff6}\ ++ \u{3b9}\ ++ \u{1f78}\ ++ \u{1f7c}\ ++ \u{20}\ ++ \u{314}\ ++ \u{2010}\ ++ \u{20}\ ++ \u{333}\ ++ \u{2032}\ ++ \u{2032}\ ++ \u{2032}\ ++ \u{2032}\ ++ \u{2032}\ ++ \u{2035}\ ++ \u{2035}\ ++ \u{2035}\ ++ \u{2035}\ ++ \u{2035}\ ++ \u{21}\ ++ \u{21}\ ++ \u{20}\ ++ \u{305}\ ++ \u{3f}\ ++ \u{3f}\ ++ \u{3f}\ ++ \u{21}\ ++ \u{21}\ ++ \u{3f}\ ++ \u{2032}\ ++ \u{2032}\ ++ \u{2032}\ ++ \u{2032}\ ++ \u{30}\ ++ \u{34}\ ++ \u{35}\ ++ \u{36}\ ++ \u{37}\ ++ \u{38}\ ++ \u{39}\ ++ \u{2b}\ ++ \u{2212}\ ++ \u{3d}\ ++ \u{28}\ ++ \u{29}\ ++ \u{72}\ ++ \u{73}\ ++ \u{61}\ ++ \u{2f}\ ++ \u{63}\ ++ \u{61}\ ++ \u{2f}\ ++ \u{73}\ ++ \u{b0}\ ++ \u{63}\ ++ \u{63}\ ++ \u{2f}\ ++ \u{6f}\ ++ \u{63}\ ++ \u{2f}\ ++ \u{75}\ ++ \u{b0}\ ++ \u{66}\ ++ \u{6e}\ ++ \u{6f}\ ++ \u{73}\ ++ \u{6d}\ ++ \u{74}\ ++ \u{65}\ ++ \u{6c}\ ++ \u{74}\ ++ \u{6d}\ ++ \u{5d0}\ ++ \u{5d1}\ ++ \u{5d2}\ ++ \u{5d3}\ ++ \u{66}\ ++ \u{61}\ ++ \u{78}\ ++ \u{2211}\ ++ \u{31}\ ++ \u{2044}\ ++ \u{37}\ ++ \u{31}\ ++ \u{2044}\ ++ \u{39}\ ++ \u{31}\ ++ \u{2044}\ ++ \u{31}\ ++ \u{30}\ ++ \u{31}\ ++ \u{2044}\ ++ \u{33}\ ++ \u{32}\ ++ \u{2044}\ ++ \u{33}\ ++ \u{31}\ ++ \u{2044}\ ++ \u{35}\ ++ \u{32}\ ++ \u{2044}\ ++ \u{35}\ ++ \u{33}\ ++ \u{2044}\ ++ \u{35}\ ++ \u{34}\ ++ \u{2044}\ ++ \u{35}\ ++ \u{31}\ ++ \u{2044}\ ++ \u{36}\ ++ \u{35}\ ++ \u{2044}\ ++ \u{36}\ ++ \u{31}\ ++ \u{2044}\ ++ \u{38}\ ++ \u{33}\ ++ \u{2044}\ ++ \u{38}\ ++ \u{35}\ ++ \u{2044}\ ++ \u{38}\ ++ \u{37}\ ++ \u{2044}\ ++ 
\u{38}\ ++ \u{31}\ ++ \u{2044}\ ++ \u{69}\ ++ \u{69}\ ++ \u{69}\ ++ \u{69}\ ++ \u{69}\ ++ \u{69}\ ++ \u{76}\ ++ \u{76}\ ++ \u{69}\ ++ \u{76}\ ++ \u{69}\ ++ \u{69}\ ++ \u{76}\ ++ \u{69}\ ++ \u{69}\ ++ \u{69}\ ++ \u{69}\ ++ \u{78}\ ++ \u{78}\ ++ \u{69}\ ++ \u{78}\ ++ \u{69}\ ++ \u{69}\ ++ \u{30}\ ++ \u{2044}\ ++ \u{33}\ ++ \u{222b}\ ++ \u{222b}\ ++ \u{222b}\ ++ \u{222b}\ ++ \u{222b}\ ++ \u{222e}\ ++ \u{222e}\ ++ \u{222e}\ ++ \u{222e}\ ++ \u{222e}\ ++ \u{3008}\ ++ \u{3009}\ ++ \u{31}\ ++ \u{30}\ ++ \u{31}\ ++ \u{31}\ ++ \u{31}\ ++ \u{32}\ ++ \u{31}\ ++ \u{33}\ ++ \u{31}\ ++ \u{34}\ ++ \u{31}\ ++ \u{35}\ ++ \u{31}\ ++ \u{36}\ ++ \u{31}\ ++ \u{37}\ ++ \u{31}\ ++ \u{38}\ ++ \u{31}\ ++ \u{39}\ ++ \u{32}\ ++ \u{30}\ ++ \u{28}\ ++ \u{31}\ ++ \u{29}\ ++ \u{28}\ ++ \u{32}\ ++ \u{29}\ ++ \u{28}\ ++ \u{33}\ ++ \u{29}\ ++ \u{28}\ ++ \u{34}\ ++ \u{29}\ ++ \u{28}\ ++ \u{35}\ ++ \u{29}\ ++ \u{28}\ ++ \u{36}\ ++ \u{29}\ ++ \u{28}\ ++ \u{37}\ ++ \u{29}\ ++ \u{28}\ ++ \u{38}\ ++ \u{29}\ ++ \u{28}\ ++ \u{39}\ ++ \u{29}\ ++ \u{28}\ ++ \u{31}\ ++ \u{30}\ ++ \u{29}\ ++ \u{28}\ ++ \u{31}\ ++ \u{31}\ ++ \u{29}\ ++ \u{28}\ ++ \u{31}\ ++ \u{32}\ ++ \u{29}\ ++ \u{28}\ ++ \u{31}\ ++ \u{33}\ ++ \u{29}\ ++ \u{28}\ ++ \u{31}\ ++ \u{34}\ ++ \u{29}\ ++ \u{28}\ ++ \u{31}\ ++ \u{35}\ ++ \u{29}\ ++ \u{28}\ ++ \u{31}\ ++ \u{36}\ ++ \u{29}\ ++ \u{28}\ ++ \u{31}\ ++ \u{37}\ ++ \u{29}\ ++ \u{28}\ ++ \u{31}\ ++ \u{38}\ ++ \u{29}\ ++ \u{28}\ ++ \u{31}\ ++ \u{39}\ ++ \u{29}\ ++ \u{28}\ ++ \u{32}\ ++ \u{30}\ ++ \u{29}\ ++ \u{28}\ ++ \u{61}\ ++ \u{29}\ ++ \u{28}\ ++ \u{62}\ ++ \u{29}\ ++ \u{28}\ ++ \u{63}\ ++ \u{29}\ ++ \u{28}\ ++ \u{64}\ ++ \u{29}\ ++ \u{28}\ ++ \u{65}\ ++ \u{29}\ ++ \u{28}\ ++ \u{66}\ ++ \u{29}\ ++ \u{28}\ ++ \u{67}\ ++ \u{29}\ ++ \u{28}\ ++ \u{68}\ ++ \u{29}\ ++ \u{28}\ ++ \u{69}\ ++ \u{29}\ ++ \u{28}\ ++ \u{6a}\ ++ \u{29}\ ++ \u{28}\ ++ \u{6b}\ ++ \u{29}\ ++ \u{28}\ ++ \u{6c}\ ++ \u{29}\ ++ \u{28}\ ++ \u{6d}\ ++ \u{29}\ ++ \u{28}\ ++ \u{6e}\ ++ \u{29}\ ++ \u{28}\ ++ \u{6f}\ ++ \u{29}\ ++ 
\u{28}\ ++ \u{70}\ ++ \u{29}\ ++ \u{28}\ ++ \u{71}\ ++ \u{29}\ ++ \u{28}\ ++ \u{72}\ ++ \u{29}\ ++ \u{28}\ ++ \u{73}\ ++ \u{29}\ ++ \u{28}\ ++ \u{74}\ ++ \u{29}\ ++ \u{28}\ ++ \u{75}\ ++ \u{29}\ ++ \u{28}\ ++ \u{76}\ ++ \u{29}\ ++ \u{28}\ ++ \u{77}\ ++ \u{29}\ ++ \u{28}\ ++ \u{78}\ ++ \u{29}\ ++ \u{28}\ ++ \u{79}\ ++ \u{29}\ ++ \u{28}\ ++ \u{7a}\ ++ \u{29}\ ++ \u{222b}\ ++ \u{222b}\ ++ \u{222b}\ ++ \u{222b}\ ++ \u{3a}\ ++ \u{3a}\ ++ \u{3d}\ ++ \u{3d}\ ++ \u{3d}\ ++ \u{3d}\ ++ \u{3d}\ ++ \u{3d}\ ++ \u{2add}\ ++ \u{338}\ ++ \u{2c30}\ ++ \u{2c31}\ ++ \u{2c32}\ ++ \u{2c33}\ ++ \u{2c34}\ ++ \u{2c35}\ ++ \u{2c36}\ ++ \u{2c37}\ ++ \u{2c38}\ ++ \u{2c39}\ ++ \u{2c3a}\ ++ \u{2c3b}\ ++ \u{2c3c}\ ++ \u{2c3d}\ ++ \u{2c3e}\ ++ \u{2c3f}\ ++ \u{2c40}\ ++ \u{2c41}\ ++ \u{2c42}\ ++ \u{2c43}\ ++ \u{2c44}\ ++ \u{2c45}\ ++ \u{2c46}\ ++ \u{2c47}\ ++ \u{2c48}\ ++ \u{2c49}\ ++ \u{2c4a}\ ++ \u{2c4b}\ ++ \u{2c4c}\ ++ \u{2c4d}\ ++ \u{2c4e}\ ++ \u{2c4f}\ ++ \u{2c50}\ ++ \u{2c51}\ ++ \u{2c52}\ ++ \u{2c53}\ ++ \u{2c54}\ ++ \u{2c55}\ ++ \u{2c56}\ ++ \u{2c57}\ ++ \u{2c58}\ ++ \u{2c59}\ ++ \u{2c5a}\ ++ \u{2c5b}\ ++ \u{2c5c}\ ++ \u{2c5d}\ ++ \u{2c5e}\ ++ \u{2c61}\ ++ \u{26b}\ ++ \u{1d7d}\ ++ \u{27d}\ ++ \u{2c68}\ ++ \u{2c6a}\ ++ \u{2c6c}\ ++ \u{2c73}\ ++ \u{2c76}\ ++ \u{23f}\ ++ \u{240}\ ++ \u{2c81}\ ++ \u{2c83}\ ++ \u{2c85}\ ++ \u{2c87}\ ++ \u{2c89}\ ++ \u{2c8b}\ ++ \u{2c8d}\ ++ \u{2c8f}\ ++ \u{2c91}\ ++ \u{2c93}\ ++ \u{2c95}\ ++ \u{2c97}\ ++ \u{2c99}\ ++ \u{2c9b}\ ++ \u{2c9d}\ ++ \u{2c9f}\ ++ \u{2ca1}\ ++ \u{2ca3}\ ++ \u{2ca5}\ ++ \u{2ca7}\ ++ \u{2ca9}\ ++ \u{2cab}\ ++ \u{2cad}\ ++ \u{2caf}\ ++ \u{2cb1}\ ++ \u{2cb3}\ ++ \u{2cb5}\ ++ \u{2cb7}\ ++ \u{2cb9}\ ++ \u{2cbb}\ ++ \u{2cbd}\ ++ \u{2cbf}\ ++ \u{2cc1}\ ++ \u{2cc3}\ ++ \u{2cc5}\ ++ \u{2cc7}\ ++ \u{2cc9}\ ++ \u{2ccb}\ ++ \u{2ccd}\ ++ \u{2ccf}\ ++ \u{2cd1}\ ++ \u{2cd3}\ ++ \u{2cd5}\ ++ \u{2cd7}\ ++ \u{2cd9}\ ++ \u{2cdb}\ ++ \u{2cdd}\ ++ \u{2cdf}\ ++ \u{2ce1}\ ++ \u{2ce3}\ ++ \u{2cec}\ ++ \u{2cee}\ ++ \u{2cf3}\ ++ \u{2d61}\ ++ \u{6bcd}\ ++ 
\u{9f9f}\ ++ \u{4e00}\ ++ \u{4e28}\ ++ \u{4e36}\ ++ \u{4e3f}\ ++ \u{4e59}\ ++ \u{4e85}\ ++ \u{4e8c}\ ++ \u{4ea0}\ ++ \u{4eba}\ ++ \u{513f}\ ++ \u{5165}\ ++ \u{516b}\ ++ \u{5182}\ ++ \u{5196}\ ++ \u{51ab}\ ++ \u{51e0}\ ++ \u{51f5}\ ++ \u{5200}\ ++ \u{529b}\ ++ \u{52f9}\ ++ \u{5315}\ ++ \u{531a}\ ++ \u{5338}\ ++ \u{5341}\ ++ \u{535c}\ ++ \u{5369}\ ++ \u{5382}\ ++ \u{53b6}\ ++ \u{53c8}\ ++ \u{53e3}\ ++ \u{56d7}\ ++ \u{571f}\ ++ \u{58eb}\ ++ \u{5902}\ ++ \u{590a}\ ++ \u{5915}\ ++ \u{5927}\ ++ \u{5973}\ ++ \u{5b50}\ ++ \u{5b80}\ ++ \u{5bf8}\ ++ \u{5c0f}\ ++ \u{5c22}\ ++ \u{5c38}\ ++ \u{5c6e}\ ++ \u{5c71}\ ++ \u{5ddb}\ ++ \u{5de5}\ ++ \u{5df1}\ ++ \u{5dfe}\ ++ \u{5e72}\ ++ \u{5e7a}\ ++ \u{5e7f}\ ++ \u{5ef4}\ ++ \u{5efe}\ ++ \u{5f0b}\ ++ \u{5f13}\ ++ \u{5f50}\ ++ \u{5f61}\ ++ \u{5f73}\ ++ \u{5fc3}\ ++ \u{6208}\ ++ \u{6236}\ ++ \u{624b}\ ++ \u{652f}\ ++ \u{6534}\ ++ \u{6587}\ ++ \u{6597}\ ++ \u{65a4}\ ++ \u{65b9}\ ++ \u{65e0}\ ++ \u{65e5}\ ++ \u{66f0}\ ++ \u{6708}\ ++ \u{6728}\ ++ \u{6b20}\ ++ \u{6b62}\ ++ \u{6b79}\ ++ \u{6bb3}\ ++ \u{6bcb}\ ++ \u{6bd4}\ ++ \u{6bdb}\ ++ \u{6c0f}\ ++ \u{6c14}\ ++ \u{6c34}\ ++ \u{706b}\ ++ \u{722a}\ ++ \u{7236}\ ++ \u{723b}\ ++ \u{723f}\ ++ \u{7247}\ ++ \u{7259}\ ++ \u{725b}\ ++ \u{72ac}\ ++ \u{7384}\ ++ \u{7389}\ ++ \u{74dc}\ ++ \u{74e6}\ ++ \u{7518}\ ++ \u{751f}\ ++ \u{7528}\ ++ \u{7530}\ ++ \u{758b}\ ++ \u{7592}\ ++ \u{7676}\ ++ \u{767d}\ ++ \u{76ae}\ ++ \u{76bf}\ ++ \u{76ee}\ ++ \u{77db}\ ++ \u{77e2}\ ++ \u{77f3}\ ++ \u{793a}\ ++ \u{79b8}\ ++ \u{79be}\ ++ \u{7a74}\ ++ \u{7acb}\ ++ \u{7af9}\ ++ \u{7c73}\ ++ \u{7cf8}\ ++ \u{7f36}\ ++ \u{7f51}\ ++ \u{7f8a}\ ++ \u{7fbd}\ ++ \u{8001}\ ++ \u{800c}\ ++ \u{8012}\ ++ \u{8033}\ ++ \u{807f}\ ++ \u{8089}\ ++ \u{81e3}\ ++ \u{81ea}\ ++ \u{81f3}\ ++ \u{81fc}\ ++ \u{820c}\ ++ \u{821b}\ ++ \u{821f}\ ++ \u{826e}\ ++ \u{8272}\ ++ \u{8278}\ ++ \u{864d}\ ++ \u{866b}\ ++ \u{8840}\ ++ \u{884c}\ ++ \u{8863}\ ++ \u{897e}\ ++ \u{898b}\ ++ \u{89d2}\ ++ \u{8a00}\ ++ \u{8c37}\ ++ \u{8c46}\ ++ \u{8c55}\ ++ \u{8c78}\ 
++ \u{8c9d}\ ++ \u{8d64}\ ++ \u{8d70}\ ++ \u{8db3}\ ++ \u{8eab}\ ++ \u{8eca}\ ++ \u{8f9b}\ ++ \u{8fb0}\ ++ \u{8fb5}\ ++ \u{9091}\ ++ \u{9149}\ ++ \u{91c6}\ ++ \u{91cc}\ ++ \u{91d1}\ ++ \u{9577}\ ++ \u{9580}\ ++ \u{961c}\ ++ \u{96b6}\ ++ \u{96b9}\ ++ \u{96e8}\ ++ \u{9751}\ ++ \u{975e}\ ++ \u{9762}\ ++ \u{9769}\ ++ \u{97cb}\ ++ \u{97ed}\ ++ \u{97f3}\ ++ \u{9801}\ ++ \u{98a8}\ ++ \u{98db}\ ++ \u{98df}\ ++ \u{9996}\ ++ \u{9999}\ ++ \u{99ac}\ ++ \u{9aa8}\ ++ \u{9ad8}\ ++ \u{9adf}\ ++ \u{9b25}\ ++ \u{9b2f}\ ++ \u{9b32}\ ++ \u{9b3c}\ ++ \u{9b5a}\ ++ \u{9ce5}\ ++ \u{9e75}\ ++ \u{9e7f}\ ++ \u{9ea5}\ ++ \u{9ebb}\ ++ \u{9ec3}\ ++ \u{9ecd}\ ++ \u{9ed1}\ ++ \u{9ef9}\ ++ \u{9efd}\ ++ \u{9f0e}\ ++ \u{9f13}\ ++ \u{9f20}\ ++ \u{9f3b}\ ++ \u{9f4a}\ ++ \u{9f52}\ ++ \u{9f8d}\ ++ \u{9f9c}\ ++ \u{9fa0}\ ++ \u{2e}\ ++ \u{3012}\ ++ \u{5344}\ ++ \u{5345}\ ++ \u{20}\ ++ \u{3099}\ ++ \u{20}\ ++ \u{309a}\ ++ \u{3088}\ ++ \u{308a}\ ++ \u{30b3}\ ++ \u{30c8}\ ++ \u{1100}\ ++ \u{1101}\ ++ \u{11aa}\ ++ \u{1102}\ ++ \u{11ac}\ ++ \u{11ad}\ ++ \u{1103}\ ++ \u{1104}\ ++ \u{1105}\ ++ \u{11b0}\ ++ \u{11b1}\ ++ \u{11b2}\ ++ \u{11b3}\ ++ \u{11b4}\ ++ \u{11b5}\ ++ \u{111a}\ ++ \u{1106}\ ++ \u{1107}\ ++ \u{1108}\ ++ \u{1121}\ ++ \u{1109}\ ++ \u{110a}\ ++ \u{110b}\ ++ \u{110c}\ ++ \u{110d}\ ++ \u{110e}\ ++ \u{110f}\ ++ \u{1110}\ ++ \u{1111}\ ++ \u{1112}\ ++ \u{1161}\ ++ \u{1162}\ ++ \u{1163}\ ++ \u{1164}\ ++ \u{1165}\ ++ \u{1166}\ ++ \u{1167}\ ++ \u{1168}\ ++ \u{1169}\ ++ \u{116a}\ ++ \u{116b}\ ++ \u{116c}\ ++ \u{116d}\ ++ \u{116e}\ ++ \u{116f}\ ++ \u{1170}\ ++ \u{1171}\ ++ \u{1172}\ ++ \u{1173}\ ++ \u{1174}\ ++ \u{1175}\ ++ \u{1114}\ ++ \u{1115}\ ++ \u{11c7}\ ++ \u{11c8}\ ++ \u{11cc}\ ++ \u{11ce}\ ++ \u{11d3}\ ++ \u{11d7}\ ++ \u{11d9}\ ++ \u{111c}\ ++ \u{11dd}\ ++ \u{11df}\ ++ \u{111d}\ ++ \u{111e}\ ++ \u{1120}\ ++ \u{1122}\ ++ \u{1123}\ ++ \u{1127}\ ++ \u{1129}\ ++ \u{112b}\ ++ \u{112c}\ ++ \u{112d}\ ++ \u{112e}\ ++ \u{112f}\ ++ \u{1132}\ ++ \u{1136}\ ++ \u{1140}\ ++ \u{1147}\ ++ \u{114c}\ ++ \u{11f1}\ ++ 
\u{11f2}\ ++ \u{1157}\ ++ \u{1158}\ ++ \u{1159}\ ++ \u{1184}\ ++ \u{1185}\ ++ \u{1188}\ ++ \u{1191}\ ++ \u{1192}\ ++ \u{1194}\ ++ \u{119e}\ ++ \u{11a1}\ ++ \u{4e09}\ ++ \u{56db}\ ++ \u{4e0a}\ ++ \u{4e2d}\ ++ \u{4e0b}\ ++ \u{7532}\ ++ \u{4e19}\ ++ \u{4e01}\ ++ \u{5929}\ ++ \u{5730}\ ++ \u{28}\ ++ \u{1100}\ ++ \u{29}\ ++ \u{28}\ ++ \u{1102}\ ++ \u{29}\ ++ \u{28}\ ++ \u{1103}\ ++ \u{29}\ ++ \u{28}\ ++ \u{1105}\ ++ \u{29}\ ++ \u{28}\ ++ \u{1106}\ ++ \u{29}\ ++ \u{28}\ ++ \u{1107}\ ++ \u{29}\ ++ \u{28}\ ++ \u{1109}\ ++ \u{29}\ ++ \u{28}\ ++ \u{110b}\ ++ \u{29}\ ++ \u{28}\ ++ \u{110c}\ ++ \u{29}\ ++ \u{28}\ ++ \u{110e}\ ++ \u{29}\ ++ \u{28}\ ++ \u{110f}\ ++ \u{29}\ ++ \u{28}\ ++ \u{1110}\ ++ \u{29}\ ++ \u{28}\ ++ \u{1111}\ ++ \u{29}\ ++ \u{28}\ ++ \u{1112}\ ++ \u{29}\ ++ \u{28}\ ++ \u{ac00}\ ++ \u{29}\ ++ \u{28}\ ++ \u{b098}\ ++ \u{29}\ ++ \u{28}\ ++ \u{b2e4}\ ++ \u{29}\ ++ \u{28}\ ++ \u{b77c}\ ++ \u{29}\ ++ \u{28}\ ++ \u{b9c8}\ ++ \u{29}\ ++ \u{28}\ ++ \u{bc14}\ ++ \u{29}\ ++ \u{28}\ ++ \u{c0ac}\ ++ \u{29}\ ++ \u{28}\ ++ \u{c544}\ ++ \u{29}\ ++ \u{28}\ ++ \u{c790}\ ++ \u{29}\ ++ \u{28}\ ++ \u{cc28}\ ++ \u{29}\ ++ \u{28}\ ++ \u{ce74}\ ++ \u{29}\ ++ \u{28}\ ++ \u{d0c0}\ ++ \u{29}\ ++ \u{28}\ ++ \u{d30c}\ ++ \u{29}\ ++ \u{28}\ ++ \u{d558}\ ++ \u{29}\ ++ \u{28}\ ++ \u{c8fc}\ ++ \u{29}\ ++ \u{28}\ ++ \u{c624}\ ++ \u{c804}\ ++ \u{29}\ ++ \u{28}\ ++ \u{c624}\ ++ \u{d6c4}\ ++ \u{29}\ ++ \u{28}\ ++ \u{4e00}\ ++ \u{29}\ ++ \u{28}\ ++ \u{4e8c}\ ++ \u{29}\ ++ \u{28}\ ++ \u{4e09}\ ++ \u{29}\ ++ \u{28}\ ++ \u{56db}\ ++ \u{29}\ ++ \u{28}\ ++ \u{4e94}\ ++ \u{29}\ ++ \u{28}\ ++ \u{516d}\ ++ \u{29}\ ++ \u{28}\ ++ \u{4e03}\ ++ \u{29}\ ++ \u{28}\ ++ \u{516b}\ ++ \u{29}\ ++ \u{28}\ ++ \u{4e5d}\ ++ \u{29}\ ++ \u{28}\ ++ \u{5341}\ ++ \u{29}\ ++ \u{28}\ ++ \u{6708}\ ++ \u{29}\ ++ \u{28}\ ++ \u{706b}\ ++ \u{29}\ ++ \u{28}\ ++ \u{6c34}\ ++ \u{29}\ ++ \u{28}\ ++ \u{6728}\ ++ \u{29}\ ++ \u{28}\ ++ \u{91d1}\ ++ \u{29}\ ++ \u{28}\ ++ \u{571f}\ ++ \u{29}\ ++ \u{28}\ ++ \u{65e5}\ ++ \u{29}\ ++ \u{28}\ 
++ \u{682a}\ ++ \u{29}\ ++ \u{28}\ ++ \u{6709}\ ++ \u{29}\ ++ \u{28}\ ++ \u{793e}\ ++ \u{29}\ ++ \u{28}\ ++ \u{540d}\ ++ \u{29}\ ++ \u{28}\ ++ \u{7279}\ ++ \u{29}\ ++ \u{28}\ ++ \u{8ca1}\ ++ \u{29}\ ++ \u{28}\ ++ \u{795d}\ ++ \u{29}\ ++ \u{28}\ ++ \u{52b4}\ ++ \u{29}\ ++ \u{28}\ ++ \u{4ee3}\ ++ \u{29}\ ++ \u{28}\ ++ \u{547c}\ ++ \u{29}\ ++ \u{28}\ ++ \u{5b66}\ ++ \u{29}\ ++ \u{28}\ ++ \u{76e3}\ ++ \u{29}\ ++ \u{28}\ ++ \u{4f01}\ ++ \u{29}\ ++ \u{28}\ ++ \u{8cc7}\ ++ \u{29}\ ++ \u{28}\ ++ \u{5354}\ ++ \u{29}\ ++ \u{28}\ ++ \u{796d}\ ++ \u{29}\ ++ \u{28}\ ++ \u{4f11}\ ++ \u{29}\ ++ \u{28}\ ++ \u{81ea}\ ++ \u{29}\ ++ \u{28}\ ++ \u{81f3}\ ++ \u{29}\ ++ \u{554f}\ ++ \u{5e7c}\ ++ \u{7b8f}\ ++ \u{70}\ ++ \u{74}\ ++ \u{65}\ ++ \u{32}\ ++ \u{31}\ ++ \u{32}\ ++ \u{32}\ ++ \u{32}\ ++ \u{33}\ ++ \u{32}\ ++ \u{34}\ ++ \u{32}\ ++ \u{35}\ ++ \u{32}\ ++ \u{36}\ ++ \u{32}\ ++ \u{37}\ ++ \u{32}\ ++ \u{38}\ ++ \u{32}\ ++ \u{39}\ ++ \u{33}\ ++ \u{30}\ ++ \u{33}\ ++ \u{31}\ ++ \u{33}\ ++ \u{32}\ ++ \u{33}\ ++ \u{33}\ ++ \u{33}\ ++ \u{34}\ ++ \u{33}\ ++ \u{35}\ ++ \u{ac00}\ ++ \u{b098}\ ++ \u{b2e4}\ ++ \u{b77c}\ ++ \u{b9c8}\ ++ \u{bc14}\ ++ \u{c0ac}\ ++ \u{c544}\ ++ \u{c790}\ ++ \u{cc28}\ ++ \u{ce74}\ ++ \u{d0c0}\ ++ \u{d30c}\ ++ \u{d558}\ ++ \u{cc38}\ ++ \u{ace0}\ ++ \u{c8fc}\ ++ \u{c758}\ ++ \u{c6b0}\ ++ \u{4e94}\ ++ \u{516d}\ ++ \u{4e03}\ ++ \u{4e5d}\ ++ \u{682a}\ ++ \u{6709}\ ++ \u{793e}\ ++ \u{540d}\ ++ \u{7279}\ ++ \u{8ca1}\ ++ \u{795d}\ ++ \u{52b4}\ ++ \u{79d8}\ ++ \u{7537}\ ++ \u{9069}\ ++ \u{512a}\ ++ \u{5370}\ ++ \u{6ce8}\ ++ \u{9805}\ ++ \u{4f11}\ ++ \u{5199}\ ++ \u{6b63}\ ++ \u{5de6}\ ++ \u{53f3}\ ++ \u{533b}\ ++ \u{5b97}\ ++ \u{5b66}\ ++ \u{76e3}\ ++ \u{4f01}\ ++ \u{8cc7}\ ++ \u{5354}\ ++ \u{591c}\ ++ \u{33}\ ++ \u{36}\ ++ \u{33}\ ++ \u{37}\ ++ \u{33}\ ++ \u{38}\ ++ \u{33}\ ++ \u{39}\ ++ \u{34}\ ++ \u{30}\ ++ \u{34}\ ++ \u{31}\ ++ \u{34}\ ++ \u{32}\ ++ \u{34}\ ++ \u{33}\ ++ \u{34}\ ++ \u{34}\ ++ \u{34}\ ++ \u{35}\ ++ \u{34}\ ++ \u{36}\ ++ \u{34}\ ++ \u{37}\ ++ \u{34}\ ++ 
\u{38}\ ++ \u{34}\ ++ \u{39}\ ++ \u{35}\ ++ \u{30}\ ++ \u{31}\ ++ \u{6708}\ ++ \u{32}\ ++ \u{6708}\ ++ \u{33}\ ++ \u{6708}\ ++ \u{34}\ ++ \u{6708}\ ++ \u{35}\ ++ \u{6708}\ ++ \u{36}\ ++ \u{6708}\ ++ \u{37}\ ++ \u{6708}\ ++ \u{38}\ ++ \u{6708}\ ++ \u{39}\ ++ \u{6708}\ ++ \u{31}\ ++ \u{30}\ ++ \u{6708}\ ++ \u{31}\ ++ \u{31}\ ++ \u{6708}\ ++ \u{31}\ ++ \u{32}\ ++ \u{6708}\ ++ \u{68}\ ++ \u{67}\ ++ \u{65}\ ++ \u{72}\ ++ \u{67}\ ++ \u{65}\ ++ \u{76}\ ++ \u{6c}\ ++ \u{74}\ ++ \u{64}\ ++ \u{30a2}\ ++ \u{30a4}\ ++ \u{30a6}\ ++ \u{30a8}\ ++ \u{30aa}\ ++ \u{30ab}\ ++ \u{30ad}\ ++ \u{30af}\ ++ \u{30b1}\ ++ \u{30b3}\ ++ \u{30b5}\ ++ \u{30b7}\ ++ \u{30b9}\ ++ \u{30bb}\ ++ \u{30bd}\ ++ \u{30bf}\ ++ \u{30c1}\ ++ \u{30c4}\ ++ \u{30c6}\ ++ \u{30c8}\ ++ \u{30ca}\ ++ \u{30cb}\ ++ \u{30cc}\ ++ \u{30cd}\ ++ \u{30ce}\ ++ \u{30cf}\ ++ \u{30d2}\ ++ \u{30d5}\ ++ \u{30d8}\ ++ \u{30db}\ ++ \u{30de}\ ++ \u{30df}\ ++ \u{30e0}\ ++ \u{30e1}\ ++ \u{30e2}\ ++ \u{30e4}\ ++ \u{30e6}\ ++ \u{30e8}\ ++ \u{30e9}\ ++ \u{30ea}\ ++ \u{30eb}\ ++ \u{30ec}\ ++ \u{30ed}\ ++ \u{30ef}\ ++ \u{30f0}\ ++ \u{30f1}\ ++ \u{30f2}\ ++ \u{30a2}\ ++ \u{30d1}\ ++ \u{30fc}\ ++ \u{30c8}\ ++ \u{30a2}\ ++ \u{30eb}\ ++ \u{30d5}\ ++ \u{30a1}\ ++ \u{30a2}\ ++ \u{30f3}\ ++ \u{30da}\ ++ \u{30a2}\ ++ \u{30a2}\ ++ \u{30fc}\ ++ \u{30eb}\ ++ \u{30a4}\ ++ \u{30cb}\ ++ \u{30f3}\ ++ \u{30b0}\ ++ \u{30a4}\ ++ \u{30f3}\ ++ \u{30c1}\ ++ \u{30a6}\ ++ \u{30a9}\ ++ \u{30f3}\ ++ \u{30a8}\ ++ \u{30b9}\ ++ \u{30af}\ ++ \u{30fc}\ ++ \u{30c9}\ ++ \u{30a8}\ ++ \u{30fc}\ ++ \u{30ab}\ ++ \u{30fc}\ ++ \u{30aa}\ ++ \u{30f3}\ ++ \u{30b9}\ ++ \u{30aa}\ ++ \u{30fc}\ ++ \u{30e0}\ ++ \u{30ab}\ ++ \u{30a4}\ ++ \u{30ea}\ ++ \u{30ab}\ ++ \u{30e9}\ ++ \u{30c3}\ ++ \u{30c8}\ ++ \u{30ab}\ ++ \u{30ed}\ ++ \u{30ea}\ ++ \u{30fc}\ ++ \u{30ac}\ ++ \u{30ed}\ ++ \u{30f3}\ ++ \u{30ac}\ ++ \u{30f3}\ ++ \u{30de}\ ++ \u{30ae}\ ++ \u{30ac}\ ++ \u{30ae}\ ++ \u{30cb}\ ++ \u{30fc}\ ++ \u{30ad}\ ++ \u{30e5}\ ++ \u{30ea}\ ++ \u{30fc}\ ++ \u{30ae}\ ++ \u{30eb}\ ++ \u{30c0}\ ++ 
\u{30fc}\ ++ \u{30ad}\ ++ \u{30ed}\ ++ \u{30ad}\ ++ \u{30ed}\ ++ \u{30b0}\ ++ \u{30e9}\ ++ \u{30e0}\ ++ \u{30ad}\ ++ \u{30ed}\ ++ \u{30e1}\ ++ \u{30fc}\ ++ \u{30c8}\ ++ \u{30eb}\ ++ \u{30ad}\ ++ \u{30ed}\ ++ \u{30ef}\ ++ \u{30c3}\ ++ \u{30c8}\ ++ \u{30b0}\ ++ \u{30e9}\ ++ \u{30e0}\ ++ \u{30b0}\ ++ \u{30e9}\ ++ \u{30e0}\ ++ \u{30c8}\ ++ \u{30f3}\ ++ \u{30af}\ ++ \u{30eb}\ ++ \u{30bc}\ ++ \u{30a4}\ ++ \u{30ed}\ ++ \u{30af}\ ++ \u{30ed}\ ++ \u{30fc}\ ++ \u{30cd}\ ++ \u{30b1}\ ++ \u{30fc}\ ++ \u{30b9}\ ++ \u{30b3}\ ++ \u{30eb}\ ++ \u{30ca}\ ++ \u{30b3}\ ++ \u{30fc}\ ++ \u{30dd}\ ++ \u{30b5}\ ++ \u{30a4}\ ++ \u{30af}\ ++ \u{30eb}\ ++ \u{30b5}\ ++ \u{30f3}\ ++ \u{30c1}\ ++ \u{30fc}\ ++ \u{30e0}\ ++ \u{30b7}\ ++ \u{30ea}\ ++ \u{30f3}\ ++ \u{30b0}\ ++ \u{30bb}\ ++ \u{30f3}\ ++ \u{30c1}\ ++ \u{30bb}\ ++ \u{30f3}\ ++ \u{30c8}\ ++ \u{30c0}\ ++ \u{30fc}\ ++ \u{30b9}\ ++ \u{30c7}\ ++ \u{30b7}\ ++ \u{30c9}\ ++ \u{30eb}\ ++ \u{30c8}\ ++ \u{30f3}\ ++ \u{30ca}\ ++ \u{30ce}\ ++ \u{30ce}\ ++ \u{30c3}\ ++ \u{30c8}\ ++ \u{30cf}\ ++ \u{30a4}\ ++ \u{30c4}\ ++ \u{30d1}\ ++ \u{30fc}\ ++ \u{30bb}\ ++ \u{30f3}\ ++ \u{30c8}\ ++ \u{30d1}\ ++ \u{30fc}\ ++ \u{30c4}\ ++ \u{30d0}\ ++ \u{30fc}\ ++ \u{30ec}\ ++ \u{30eb}\ ++ \u{30d4}\ ++ \u{30a2}\ ++ \u{30b9}\ ++ \u{30c8}\ ++ \u{30eb}\ ++ \u{30d4}\ ++ \u{30af}\ ++ \u{30eb}\ ++ \u{30d4}\ ++ \u{30b3}\ ++ \u{30d3}\ ++ \u{30eb}\ ++ \u{30d5}\ ++ \u{30a1}\ ++ \u{30e9}\ ++ \u{30c3}\ ++ \u{30c9}\ ++ \u{30d5}\ ++ \u{30a3}\ ++ \u{30fc}\ ++ \u{30c8}\ ++ \u{30d6}\ ++ \u{30c3}\ ++ \u{30b7}\ ++ \u{30a7}\ ++ \u{30eb}\ ++ \u{30d5}\ ++ \u{30e9}\ ++ \u{30f3}\ ++ \u{30d8}\ ++ \u{30af}\ ++ \u{30bf}\ ++ \u{30fc}\ ++ \u{30eb}\ ++ \u{30da}\ ++ \u{30bd}\ ++ \u{30da}\ ++ \u{30cb}\ ++ \u{30d2}\ ++ \u{30d8}\ ++ \u{30eb}\ ++ \u{30c4}\ ++ \u{30da}\ ++ \u{30f3}\ ++ \u{30b9}\ ++ \u{30da}\ ++ \u{30fc}\ ++ \u{30b8}\ ++ \u{30d9}\ ++ \u{30fc}\ ++ \u{30bf}\ ++ \u{30dd}\ ++ \u{30a4}\ ++ \u{30f3}\ ++ \u{30c8}\ ++ \u{30dc}\ ++ \u{30eb}\ ++ \u{30c8}\ ++ \u{30db}\ ++ \u{30f3}\ ++ \u{30dd}\ 
++ \u{30f3}\ ++ \u{30c9}\ ++ \u{30db}\ ++ \u{30fc}\ ++ \u{30eb}\ ++ \u{30db}\ ++ \u{30fc}\ ++ \u{30f3}\ ++ \u{30de}\ ++ \u{30a4}\ ++ \u{30af}\ ++ \u{30ed}\ ++ \u{30de}\ ++ \u{30a4}\ ++ \u{30eb}\ ++ \u{30de}\ ++ \u{30c3}\ ++ \u{30cf}\ ++ \u{30de}\ ++ \u{30eb}\ ++ \u{30af}\ ++ \u{30de}\ ++ \u{30f3}\ ++ \u{30b7}\ ++ \u{30e7}\ ++ \u{30f3}\ ++ \u{30df}\ ++ \u{30af}\ ++ \u{30ed}\ ++ \u{30f3}\ ++ \u{30df}\ ++ \u{30ea}\ ++ \u{30df}\ ++ \u{30ea}\ ++ \u{30d0}\ ++ \u{30fc}\ ++ \u{30eb}\ ++ \u{30e1}\ ++ \u{30ac}\ ++ \u{30e1}\ ++ \u{30ac}\ ++ \u{30c8}\ ++ \u{30f3}\ ++ \u{30e1}\ ++ \u{30fc}\ ++ \u{30c8}\ ++ \u{30eb}\ ++ \u{30e4}\ ++ \u{30fc}\ ++ \u{30c9}\ ++ \u{30e4}\ ++ \u{30fc}\ ++ \u{30eb}\ ++ \u{30e6}\ ++ \u{30a2}\ ++ \u{30f3}\ ++ \u{30ea}\ ++ \u{30c3}\ ++ \u{30c8}\ ++ \u{30eb}\ ++ \u{30ea}\ ++ \u{30e9}\ ++ \u{30eb}\ ++ \u{30d4}\ ++ \u{30fc}\ ++ \u{30eb}\ ++ \u{30fc}\ ++ \u{30d6}\ ++ \u{30eb}\ ++ \u{30ec}\ ++ \u{30e0}\ ++ \u{30ec}\ ++ \u{30f3}\ ++ \u{30c8}\ ++ \u{30b2}\ ++ \u{30f3}\ ++ \u{30ef}\ ++ \u{30c3}\ ++ \u{30c8}\ ++ \u{30}\ ++ \u{70b9}\ ++ \u{31}\ ++ \u{70b9}\ ++ \u{32}\ ++ \u{70b9}\ ++ \u{33}\ ++ \u{70b9}\ ++ \u{34}\ ++ \u{70b9}\ ++ \u{35}\ ++ \u{70b9}\ ++ \u{36}\ ++ \u{70b9}\ ++ \u{37}\ ++ \u{70b9}\ ++ \u{38}\ ++ \u{70b9}\ ++ \u{39}\ ++ \u{70b9}\ ++ \u{31}\ ++ \u{30}\ ++ \u{70b9}\ ++ \u{31}\ ++ \u{31}\ ++ \u{70b9}\ ++ \u{31}\ ++ \u{32}\ ++ \u{70b9}\ ++ \u{31}\ ++ \u{33}\ ++ \u{70b9}\ ++ \u{31}\ ++ \u{34}\ ++ \u{70b9}\ ++ \u{31}\ ++ \u{35}\ ++ \u{70b9}\ ++ \u{31}\ ++ \u{36}\ ++ \u{70b9}\ ++ \u{31}\ ++ \u{37}\ ++ \u{70b9}\ ++ \u{31}\ ++ \u{38}\ ++ \u{70b9}\ ++ \u{31}\ ++ \u{39}\ ++ \u{70b9}\ ++ \u{32}\ ++ \u{30}\ ++ \u{70b9}\ ++ \u{32}\ ++ \u{31}\ ++ \u{70b9}\ ++ \u{32}\ ++ \u{32}\ ++ \u{70b9}\ ++ \u{32}\ ++ \u{33}\ ++ \u{70b9}\ ++ \u{32}\ ++ \u{34}\ ++ \u{70b9}\ ++ \u{68}\ ++ \u{70}\ ++ \u{61}\ ++ \u{64}\ ++ \u{61}\ ++ \u{61}\ ++ \u{75}\ ++ \u{62}\ ++ \u{61}\ ++ \u{72}\ ++ \u{6f}\ ++ \u{76}\ ++ \u{70}\ ++ \u{63}\ ++ \u{64}\ ++ \u{6d}\ ++ \u{64}\ ++ \u{6d}\ ++ 
\u{32}\ ++ \u{64}\ ++ \u{6d}\ ++ \u{33}\ ++ \u{69}\ ++ \u{75}\ ++ \u{5e73}\ ++ \u{6210}\ ++ \u{662d}\ ++ \u{548c}\ ++ \u{5927}\ ++ \u{6b63}\ ++ \u{660e}\ ++ \u{6cbb}\ ++ \u{682a}\ ++ \u{5f0f}\ ++ \u{4f1a}\ ++ \u{793e}\ ++ \u{70}\ ++ \u{61}\ ++ \u{6e}\ ++ \u{61}\ ++ \u{3bc}\ ++ \u{61}\ ++ \u{6d}\ ++ \u{61}\ ++ \u{6b}\ ++ \u{61}\ ++ \u{6b}\ ++ \u{62}\ ++ \u{6d}\ ++ \u{62}\ ++ \u{67}\ ++ \u{62}\ ++ \u{63}\ ++ \u{61}\ ++ \u{6c}\ ++ \u{6b}\ ++ \u{63}\ ++ \u{61}\ ++ \u{6c}\ ++ \u{70}\ ++ \u{66}\ ++ \u{6e}\ ++ \u{66}\ ++ \u{3bc}\ ++ \u{66}\ ++ \u{3bc}\ ++ \u{67}\ ++ \u{6d}\ ++ \u{67}\ ++ \u{6b}\ ++ \u{67}\ ++ \u{68}\ ++ \u{7a}\ ++ \u{6b}\ ++ \u{68}\ ++ \u{7a}\ ++ \u{6d}\ ++ \u{68}\ ++ \u{7a}\ ++ \u{67}\ ++ \u{68}\ ++ \u{7a}\ ++ \u{74}\ ++ \u{68}\ ++ \u{7a}\ ++ \u{3bc}\ ++ \u{6c}\ ++ \u{6d}\ ++ \u{6c}\ ++ \u{64}\ ++ \u{6c}\ ++ \u{6b}\ ++ \u{6c}\ ++ \u{66}\ ++ \u{6d}\ ++ \u{6e}\ ++ \u{6d}\ ++ \u{3bc}\ ++ \u{6d}\ ++ \u{6d}\ ++ \u{6d}\ ++ \u{63}\ ++ \u{6d}\ ++ \u{6b}\ ++ \u{6d}\ ++ \u{6d}\ ++ \u{6d}\ ++ \u{32}\ ++ \u{63}\ ++ \u{6d}\ ++ \u{32}\ ++ \u{6d}\ ++ \u{32}\ ++ \u{6b}\ ++ \u{6d}\ ++ \u{32}\ ++ \u{6d}\ ++ \u{6d}\ ++ \u{33}\ ++ \u{63}\ ++ \u{6d}\ ++ \u{33}\ ++ \u{6d}\ ++ \u{33}\ ++ \u{6b}\ ++ \u{6d}\ ++ \u{33}\ ++ \u{6d}\ ++ \u{2215}\ ++ \u{73}\ ++ \u{6d}\ ++ \u{2215}\ ++ \u{73}\ ++ \u{32}\ ++ \u{6b}\ ++ \u{70}\ ++ \u{61}\ ++ \u{6d}\ ++ \u{70}\ ++ \u{61}\ ++ \u{67}\ ++ \u{70}\ ++ \u{61}\ ++ \u{72}\ ++ \u{61}\ ++ \u{64}\ ++ \u{72}\ ++ \u{61}\ ++ \u{64}\ ++ \u{2215}\ ++ \u{73}\ ++ \u{72}\ ++ \u{61}\ ++ \u{64}\ ++ \u{2215}\ ++ \u{73}\ ++ \u{32}\ ++ \u{70}\ ++ \u{73}\ ++ \u{6e}\ ++ \u{73}\ ++ \u{3bc}\ ++ \u{73}\ ++ \u{6d}\ ++ \u{73}\ ++ \u{70}\ ++ \u{76}\ ++ \u{6e}\ ++ \u{76}\ ++ \u{3bc}\ ++ \u{76}\ ++ \u{6d}\ ++ \u{76}\ ++ \u{6b}\ ++ \u{76}\ ++ \u{70}\ ++ \u{77}\ ++ \u{6e}\ ++ \u{77}\ ++ \u{3bc}\ ++ \u{77}\ ++ \u{6d}\ ++ \u{77}\ ++ \u{6b}\ ++ \u{77}\ ++ \u{6b}\ ++ \u{3c9}\ ++ \u{6d}\ ++ \u{3c9}\ ++ \u{62}\ ++ \u{71}\ ++ \u{63}\ ++ \u{63}\ ++ \u{63}\ ++ \u{64}\ ++ \u{63}\ ++ 
\u{2215}\ ++ \u{6b}\ ++ \u{67}\ ++ \u{64}\ ++ \u{62}\ ++ \u{67}\ ++ \u{79}\ ++ \u{68}\ ++ \u{61}\ ++ \u{68}\ ++ \u{70}\ ++ \u{69}\ ++ \u{6e}\ ++ \u{6b}\ ++ \u{6b}\ ++ \u{6b}\ ++ \u{74}\ ++ \u{6c}\ ++ \u{6d}\ ++ \u{6c}\ ++ \u{6e}\ ++ \u{6c}\ ++ \u{6f}\ ++ \u{67}\ ++ \u{6c}\ ++ \u{78}\ ++ \u{6d}\ ++ \u{69}\ ++ \u{6c}\ ++ \u{6d}\ ++ \u{6f}\ ++ \u{6c}\ ++ \u{70}\ ++ \u{68}\ ++ \u{70}\ ++ \u{70}\ ++ \u{6d}\ ++ \u{70}\ ++ \u{72}\ ++ \u{73}\ ++ \u{72}\ ++ \u{73}\ ++ \u{76}\ ++ \u{77}\ ++ \u{62}\ ++ \u{76}\ ++ \u{2215}\ ++ \u{6d}\ ++ \u{61}\ ++ \u{2215}\ ++ \u{6d}\ ++ \u{31}\ ++ \u{65e5}\ ++ \u{32}\ ++ \u{65e5}\ ++ \u{33}\ ++ \u{65e5}\ ++ \u{34}\ ++ \u{65e5}\ ++ \u{35}\ ++ \u{65e5}\ ++ \u{36}\ ++ \u{65e5}\ ++ \u{37}\ ++ \u{65e5}\ ++ \u{38}\ ++ \u{65e5}\ ++ \u{39}\ ++ \u{65e5}\ ++ \u{31}\ ++ \u{30}\ ++ \u{65e5}\ ++ \u{31}\ ++ \u{31}\ ++ \u{65e5}\ ++ \u{31}\ ++ \u{32}\ ++ \u{65e5}\ ++ \u{31}\ ++ \u{33}\ ++ \u{65e5}\ ++ \u{31}\ ++ \u{34}\ ++ \u{65e5}\ ++ \u{31}\ ++ \u{35}\ ++ \u{65e5}\ ++ \u{31}\ ++ \u{36}\ ++ \u{65e5}\ ++ \u{31}\ ++ \u{37}\ ++ \u{65e5}\ ++ \u{31}\ ++ \u{38}\ ++ \u{65e5}\ ++ \u{31}\ ++ \u{39}\ ++ \u{65e5}\ ++ \u{32}\ ++ \u{30}\ ++ \u{65e5}\ ++ \u{32}\ ++ \u{31}\ ++ \u{65e5}\ ++ \u{32}\ ++ \u{32}\ ++ \u{65e5}\ ++ \u{32}\ ++ \u{33}\ ++ \u{65e5}\ ++ \u{32}\ ++ \u{34}\ ++ \u{65e5}\ ++ \u{32}\ ++ \u{35}\ ++ \u{65e5}\ ++ \u{32}\ ++ \u{36}\ ++ \u{65e5}\ ++ \u{32}\ ++ \u{37}\ ++ \u{65e5}\ ++ \u{32}\ ++ \u{38}\ ++ \u{65e5}\ ++ \u{32}\ ++ \u{39}\ ++ \u{65e5}\ ++ \u{33}\ ++ \u{30}\ ++ \u{65e5}\ ++ \u{33}\ ++ \u{31}\ ++ \u{65e5}\ ++ \u{67}\ ++ \u{61}\ ++ \u{6c}\ ++ \u{a641}\ ++ \u{a643}\ ++ \u{a645}\ ++ \u{a647}\ ++ \u{a649}\ ++ \u{a64d}\ ++ \u{a64f}\ ++ \u{a651}\ ++ \u{a653}\ ++ \u{a655}\ ++ \u{a657}\ ++ \u{a659}\ ++ \u{a65b}\ ++ \u{a65d}\ ++ \u{a65f}\ ++ \u{a661}\ ++ \u{a663}\ ++ \u{a665}\ ++ \u{a667}\ ++ \u{a669}\ ++ \u{a66b}\ ++ \u{a66d}\ ++ \u{a681}\ ++ \u{a683}\ ++ \u{a685}\ ++ \u{a687}\ ++ \u{a689}\ ++ \u{a68b}\ ++ \u{a68d}\ ++ \u{a68f}\ ++ \u{a691}\ ++ \u{a693}\ 
++ \u{a695}\ ++ \u{a697}\ ++ \u{a699}\ ++ \u{a69b}\ ++ \u{a723}\ ++ \u{a725}\ ++ \u{a727}\ ++ \u{a729}\ ++ \u{a72b}\ ++ \u{a72d}\ ++ \u{a72f}\ ++ \u{a733}\ ++ \u{a735}\ ++ \u{a737}\ ++ \u{a739}\ ++ \u{a73b}\ ++ \u{a73d}\ ++ \u{a73f}\ ++ \u{a741}\ ++ \u{a743}\ ++ \u{a745}\ ++ \u{a747}\ ++ \u{a749}\ ++ \u{a74b}\ ++ \u{a74d}\ ++ \u{a74f}\ ++ \u{a751}\ ++ \u{a753}\ ++ \u{a755}\ ++ \u{a757}\ ++ \u{a759}\ ++ \u{a75b}\ ++ \u{a75d}\ ++ \u{a75f}\ ++ \u{a761}\ ++ \u{a763}\ ++ \u{a765}\ ++ \u{a767}\ ++ \u{a769}\ ++ \u{a76b}\ ++ \u{a76d}\ ++ \u{a76f}\ ++ \u{a77a}\ ++ \u{a77c}\ ++ \u{1d79}\ ++ \u{a77f}\ ++ \u{a781}\ ++ \u{a783}\ ++ \u{a785}\ ++ \u{a787}\ ++ \u{a78c}\ ++ \u{a791}\ ++ \u{a793}\ ++ \u{a797}\ ++ \u{a799}\ ++ \u{a79b}\ ++ \u{a79d}\ ++ \u{a79f}\ ++ \u{a7a1}\ ++ \u{a7a3}\ ++ \u{a7a5}\ ++ \u{a7a7}\ ++ \u{a7a9}\ ++ \u{26c}\ ++ \u{29e}\ ++ \u{287}\ ++ \u{ab53}\ ++ \u{a7b5}\ ++ \u{a7b7}\ ++ \u{ab37}\ ++ \u{ab52}\ ++ \u{13a0}\ ++ \u{13a1}\ ++ \u{13a2}\ ++ \u{13a3}\ ++ \u{13a4}\ ++ \u{13a5}\ ++ \u{13a6}\ ++ \u{13a7}\ ++ \u{13a8}\ ++ \u{13a9}\ ++ \u{13aa}\ ++ \u{13ab}\ ++ \u{13ac}\ ++ \u{13ad}\ ++ \u{13ae}\ ++ \u{13af}\ ++ \u{13b0}\ ++ \u{13b1}\ ++ \u{13b2}\ ++ \u{13b3}\ ++ \u{13b4}\ ++ \u{13b5}\ ++ \u{13b6}\ ++ \u{13b7}\ ++ \u{13b8}\ ++ \u{13b9}\ ++ \u{13ba}\ ++ \u{13bb}\ ++ \u{13bc}\ ++ \u{13bd}\ ++ \u{13be}\ ++ \u{13bf}\ ++ \u{13c0}\ ++ \u{13c1}\ ++ \u{13c2}\ ++ \u{13c3}\ ++ \u{13c4}\ ++ \u{13c5}\ ++ \u{13c6}\ ++ \u{13c7}\ ++ \u{13c8}\ ++ \u{13c9}\ ++ \u{13ca}\ ++ \u{13cb}\ ++ \u{13cc}\ ++ \u{13cd}\ ++ \u{13ce}\ ++ \u{13cf}\ ++ \u{13d0}\ ++ \u{13d1}\ ++ \u{13d2}\ ++ \u{13d3}\ ++ \u{13d4}\ ++ \u{13d5}\ ++ \u{13d6}\ ++ \u{13d7}\ ++ \u{13d8}\ ++ \u{13d9}\ ++ \u{13da}\ ++ \u{13db}\ ++ \u{13dc}\ ++ \u{13dd}\ ++ \u{13de}\ ++ \u{13df}\ ++ \u{13e0}\ ++ \u{13e1}\ ++ \u{13e2}\ ++ \u{13e3}\ ++ \u{13e4}\ ++ \u{13e5}\ ++ \u{13e6}\ ++ \u{13e7}\ ++ \u{13e8}\ ++ \u{13e9}\ ++ \u{13ea}\ ++ \u{13eb}\ ++ \u{13ec}\ ++ \u{13ed}\ ++ \u{13ee}\ ++ \u{13ef}\ ++ \u{8c48}\ ++ \u{66f4}\ ++ \u{8cc8}\ 
++ \u{6ed1}\ ++ \u{4e32}\ ++ \u{53e5}\ ++ \u{5951}\ ++ \u{5587}\ ++ \u{5948}\ ++ \u{61f6}\ ++ \u{7669}\ ++ \u{7f85}\ ++ \u{863f}\ ++ \u{87ba}\ ++ \u{88f8}\ ++ \u{908f}\ ++ \u{6a02}\ ++ \u{6d1b}\ ++ \u{70d9}\ ++ \u{73de}\ ++ \u{843d}\ ++ \u{916a}\ ++ \u{99f1}\ ++ \u{4e82}\ ++ \u{5375}\ ++ \u{6b04}\ ++ \u{721b}\ ++ \u{862d}\ ++ \u{9e1e}\ ++ \u{5d50}\ ++ \u{6feb}\ ++ \u{85cd}\ ++ \u{8964}\ ++ \u{62c9}\ ++ \u{81d8}\ ++ \u{881f}\ ++ \u{5eca}\ ++ \u{6717}\ ++ \u{6d6a}\ ++ \u{72fc}\ ++ \u{90ce}\ ++ \u{4f86}\ ++ \u{51b7}\ ++ \u{52de}\ ++ \u{64c4}\ ++ \u{6ad3}\ ++ \u{7210}\ ++ \u{76e7}\ ++ \u{8606}\ ++ \u{865c}\ ++ \u{8def}\ ++ \u{9732}\ ++ \u{9b6f}\ ++ \u{9dfa}\ ++ \u{788c}\ ++ \u{797f}\ ++ \u{7da0}\ ++ \u{83c9}\ ++ \u{9304}\ ++ \u{8ad6}\ ++ \u{58df}\ ++ \u{5f04}\ ++ \u{7c60}\ ++ \u{807e}\ ++ \u{7262}\ ++ \u{78ca}\ ++ \u{8cc2}\ ++ \u{96f7}\ ++ \u{58d8}\ ++ \u{5c62}\ ++ \u{6a13}\ ++ \u{6dda}\ ++ \u{6f0f}\ ++ \u{7d2f}\ ++ \u{7e37}\ ++ \u{964b}\ ++ \u{52d2}\ ++ \u{808b}\ ++ \u{51dc}\ ++ \u{51cc}\ ++ \u{7a1c}\ ++ \u{7dbe}\ ++ \u{83f1}\ ++ \u{9675}\ ++ \u{8b80}\ ++ \u{62cf}\ ++ \u{8afe}\ ++ \u{4e39}\ ++ \u{5be7}\ ++ \u{6012}\ ++ \u{7387}\ ++ \u{7570}\ ++ \u{5317}\ ++ \u{78fb}\ ++ \u{4fbf}\ ++ \u{5fa9}\ ++ \u{4e0d}\ ++ \u{6ccc}\ ++ \u{6578}\ ++ \u{7d22}\ ++ \u{53c3}\ ++ \u{585e}\ ++ \u{7701}\ ++ \u{8449}\ ++ \u{8aaa}\ ++ \u{6bba}\ ++ \u{6c88}\ ++ \u{62fe}\ ++ \u{82e5}\ ++ \u{63a0}\ ++ \u{7565}\ ++ \u{4eae}\ ++ \u{5169}\ ++ \u{51c9}\ ++ \u{6881}\ ++ \u{7ce7}\ ++ \u{826f}\ ++ \u{8ad2}\ ++ \u{91cf}\ ++ \u{52f5}\ ++ \u{5442}\ ++ \u{5eec}\ ++ \u{65c5}\ ++ \u{6ffe}\ ++ \u{792a}\ ++ \u{95ad}\ ++ \u{9a6a}\ ++ \u{9e97}\ ++ \u{9ece}\ ++ \u{66c6}\ ++ \u{6b77}\ ++ \u{8f62}\ ++ \u{5e74}\ ++ \u{6190}\ ++ \u{6200}\ ++ \u{649a}\ ++ \u{6f23}\ ++ \u{7149}\ ++ \u{7489}\ ++ \u{79ca}\ ++ \u{7df4}\ ++ \u{806f}\ ++ \u{8f26}\ ++ \u{84ee}\ ++ \u{9023}\ ++ \u{934a}\ ++ \u{5217}\ ++ \u{52a3}\ ++ \u{54bd}\ ++ \u{70c8}\ ++ \u{88c2}\ ++ \u{5ec9}\ ++ \u{5ff5}\ ++ \u{637b}\ ++ \u{6bae}\ ++ \u{7c3e}\ ++ 
\u{7375}\ ++ \u{4ee4}\ ++ \u{56f9}\ ++ \u{5dba}\ ++ \u{601c}\ ++ \u{73b2}\ ++ \u{7469}\ ++ \u{7f9a}\ ++ \u{8046}\ ++ \u{9234}\ ++ \u{96f6}\ ++ \u{9748}\ ++ \u{9818}\ ++ \u{4f8b}\ ++ \u{79ae}\ ++ \u{91b4}\ ++ \u{96b8}\ ++ \u{60e1}\ ++ \u{4e86}\ ++ \u{50da}\ ++ \u{5bee}\ ++ \u{5c3f}\ ++ \u{6599}\ ++ \u{71ce}\ ++ \u{7642}\ ++ \u{84fc}\ ++ \u{907c}\ ++ \u{6688}\ ++ \u{962e}\ ++ \u{5289}\ ++ \u{677b}\ ++ \u{67f3}\ ++ \u{6d41}\ ++ \u{6e9c}\ ++ \u{7409}\ ++ \u{7559}\ ++ \u{786b}\ ++ \u{7d10}\ ++ \u{985e}\ ++ \u{622e}\ ++ \u{9678}\ ++ \u{502b}\ ++ \u{5d19}\ ++ \u{6dea}\ ++ \u{8f2a}\ ++ \u{5f8b}\ ++ \u{6144}\ ++ \u{6817}\ ++ \u{9686}\ ++ \u{5229}\ ++ \u{540f}\ ++ \u{5c65}\ ++ \u{6613}\ ++ \u{674e}\ ++ \u{68a8}\ ++ \u{6ce5}\ ++ \u{7406}\ ++ \u{75e2}\ ++ \u{7f79}\ ++ \u{88cf}\ ++ \u{88e1}\ ++ \u{96e2}\ ++ \u{533f}\ ++ \u{6eba}\ ++ \u{541d}\ ++ \u{71d0}\ ++ \u{7498}\ ++ \u{85fa}\ ++ \u{96a3}\ ++ \u{9c57}\ ++ \u{9e9f}\ ++ \u{6797}\ ++ \u{6dcb}\ ++ \u{81e8}\ ++ \u{7b20}\ ++ \u{7c92}\ ++ \u{72c0}\ ++ \u{7099}\ ++ \u{8b58}\ ++ \u{4ec0}\ ++ \u{8336}\ ++ \u{523a}\ ++ \u{5207}\ ++ \u{5ea6}\ ++ \u{62d3}\ ++ \u{7cd6}\ ++ \u{5b85}\ ++ \u{6d1e}\ ++ \u{66b4}\ ++ \u{8f3b}\ ++ \u{964d}\ ++ \u{5ed3}\ ++ \u{5140}\ ++ \u{55c0}\ ++ \u{585a}\ ++ \u{6674}\ ++ \u{51de}\ ++ \u{732a}\ ++ \u{76ca}\ ++ \u{793c}\ ++ \u{795e}\ ++ \u{7965}\ ++ \u{798f}\ ++ \u{9756}\ ++ \u{7cbe}\ ++ \u{8612}\ ++ \u{8af8}\ ++ \u{9038}\ ++ \u{90fd}\ ++ \u{98ef}\ ++ \u{98fc}\ ++ \u{9928}\ ++ \u{9db4}\ ++ \u{90de}\ ++ \u{96b7}\ ++ \u{4fae}\ ++ \u{50e7}\ ++ \u{514d}\ ++ \u{52c9}\ ++ \u{52e4}\ ++ \u{5351}\ ++ \u{559d}\ ++ \u{5606}\ ++ \u{5668}\ ++ \u{5840}\ ++ \u{58a8}\ ++ \u{5c64}\ ++ \u{6094}\ ++ \u{6168}\ ++ \u{618e}\ ++ \u{61f2}\ ++ \u{654f}\ ++ \u{65e2}\ ++ \u{6691}\ ++ \u{6885}\ ++ \u{6d77}\ ++ \u{6e1a}\ ++ \u{6f22}\ ++ \u{716e}\ ++ \u{722b}\ ++ \u{7422}\ ++ \u{7891}\ ++ \u{7949}\ ++ \u{7948}\ ++ \u{7950}\ ++ \u{7956}\ ++ \u{798d}\ ++ \u{798e}\ ++ \u{7a40}\ ++ \u{7a81}\ ++ \u{7bc0}\ ++ \u{7e09}\ ++ \u{7e41}\ ++ \u{7f72}\ 
++ \u{8005}\ ++ \u{81ed}\ ++ \u{8279}\ ++ \u{8457}\ ++ \u{8910}\ ++ \u{8996}\ ++ \u{8b01}\ ++ \u{8b39}\ ++ \u{8cd3}\ ++ \u{8d08}\ ++ \u{8fb6}\ ++ \u{96e3}\ ++ \u{97ff}\ ++ \u{983b}\ ++ \u{6075}\ ++ \u{242ee}\ ++ \u{8218}\ ++ \u{4e26}\ ++ \u{51b5}\ ++ \u{5168}\ ++ \u{4f80}\ ++ \u{5145}\ ++ \u{5180}\ ++ \u{52c7}\ ++ \u{52fa}\ ++ \u{5555}\ ++ \u{5599}\ ++ \u{55e2}\ ++ \u{58b3}\ ++ \u{5944}\ ++ \u{5954}\ ++ \u{5a62}\ ++ \u{5b28}\ ++ \u{5ed2}\ ++ \u{5ed9}\ ++ \u{5f69}\ ++ \u{5fad}\ ++ \u{60d8}\ ++ \u{614e}\ ++ \u{6108}\ ++ \u{6160}\ ++ \u{6234}\ ++ \u{63c4}\ ++ \u{641c}\ ++ \u{6452}\ ++ \u{6556}\ ++ \u{671b}\ ++ \u{6756}\ ++ \u{6edb}\ ++ \u{6ecb}\ ++ \u{701e}\ ++ \u{77a7}\ ++ \u{7235}\ ++ \u{72af}\ ++ \u{7471}\ ++ \u{7506}\ ++ \u{753b}\ ++ \u{761d}\ ++ \u{761f}\ ++ \u{76db}\ ++ \u{76f4}\ ++ \u{774a}\ ++ \u{7740}\ ++ \u{78cc}\ ++ \u{7ab1}\ ++ \u{7c7b}\ ++ \u{7d5b}\ ++ \u{7f3e}\ ++ \u{8352}\ ++ \u{83ef}\ ++ \u{8779}\ ++ \u{8941}\ ++ \u{8986}\ ++ \u{8abf}\ ++ \u{8acb}\ ++ \u{8aed}\ ++ \u{8b8a}\ ++ \u{8f38}\ ++ \u{9072}\ ++ \u{9199}\ ++ \u{9276}\ ++ \u{967c}\ ++ \u{97db}\ ++ \u{980b}\ ++ \u{9b12}\ ++ \u{2284a}\ ++ \u{22844}\ ++ \u{233d5}\ ++ \u{3b9d}\ ++ \u{4018}\ ++ \u{4039}\ ++ \u{25249}\ ++ \u{25cd0}\ ++ \u{27ed3}\ ++ \u{9f43}\ ++ \u{9f8e}\ ++ \u{66}\ ++ \u{66}\ ++ \u{66}\ ++ \u{69}\ ++ \u{66}\ ++ \u{6c}\ ++ \u{66}\ ++ \u{66}\ ++ \u{69}\ ++ \u{66}\ ++ \u{66}\ ++ \u{6c}\ ++ \u{73}\ ++ \u{74}\ ++ \u{574}\ ++ \u{576}\ ++ \u{574}\ ++ \u{565}\ ++ \u{574}\ ++ \u{56b}\ ++ \u{57e}\ ++ \u{576}\ ++ \u{574}\ ++ \u{56d}\ ++ \u{5d9}\ ++ \u{5b4}\ ++ \u{5f2}\ ++ \u{5b7}\ ++ \u{5e2}\ ++ \u{5d4}\ ++ \u{5db}\ ++ \u{5dc}\ ++ \u{5dd}\ ++ \u{5e8}\ ++ \u{5ea}\ ++ \u{5e9}\ ++ \u{5c1}\ ++ \u{5e9}\ ++ \u{5c2}\ ++ \u{5e9}\ ++ \u{5bc}\ ++ \u{5c1}\ ++ \u{5e9}\ ++ \u{5bc}\ ++ \u{5c2}\ ++ \u{5d0}\ ++ \u{5b7}\ ++ \u{5d0}\ ++ \u{5b8}\ ++ \u{5d0}\ ++ \u{5bc}\ ++ \u{5d1}\ ++ \u{5bc}\ ++ \u{5d2}\ ++ \u{5bc}\ ++ \u{5d3}\ ++ \u{5bc}\ ++ \u{5d4}\ ++ \u{5bc}\ ++ \u{5d5}\ ++ \u{5bc}\ ++ \u{5d6}\ ++ \u{5bc}\ ++ 
\u{5d8}\ ++ \u{5bc}\ ++ \u{5d9}\ ++ \u{5bc}\ ++ \u{5da}\ ++ \u{5bc}\ ++ \u{5db}\ ++ \u{5bc}\ ++ \u{5dc}\ ++ \u{5bc}\ ++ \u{5de}\ ++ \u{5bc}\ ++ \u{5e0}\ ++ \u{5bc}\ ++ \u{5e1}\ ++ \u{5bc}\ ++ \u{5e3}\ ++ \u{5bc}\ ++ \u{5e4}\ ++ \u{5bc}\ ++ \u{5e6}\ ++ \u{5bc}\ ++ \u{5e7}\ ++ \u{5bc}\ ++ \u{5e8}\ ++ \u{5bc}\ ++ \u{5e9}\ ++ \u{5bc}\ ++ \u{5ea}\ ++ \u{5bc}\ ++ \u{5d5}\ ++ \u{5b9}\ ++ \u{5d1}\ ++ \u{5bf}\ ++ \u{5db}\ ++ \u{5bf}\ ++ \u{5e4}\ ++ \u{5bf}\ ++ \u{5d0}\ ++ \u{5dc}\ ++ \u{671}\ ++ \u{67b}\ ++ \u{67e}\ ++ \u{680}\ ++ \u{67a}\ ++ \u{67f}\ ++ \u{679}\ ++ \u{6a4}\ ++ \u{6a6}\ ++ \u{684}\ ++ \u{683}\ ++ \u{686}\ ++ \u{687}\ ++ \u{68d}\ ++ \u{68c}\ ++ \u{68e}\ ++ \u{688}\ ++ \u{698}\ ++ \u{691}\ ++ \u{6a9}\ ++ \u{6af}\ ++ \u{6b3}\ ++ \u{6b1}\ ++ \u{6ba}\ ++ \u{6bb}\ ++ \u{6c0}\ ++ \u{6c1}\ ++ \u{6be}\ ++ \u{6d2}\ ++ \u{6d3}\ ++ \u{6ad}\ ++ \u{6c7}\ ++ \u{6c6}\ ++ \u{6c8}\ ++ \u{6cb}\ ++ \u{6c5}\ ++ \u{6c9}\ ++ \u{6d0}\ ++ \u{649}\ ++ \u{626}\ ++ \u{627}\ ++ \u{626}\ ++ \u{6d5}\ ++ \u{626}\ ++ \u{648}\ ++ \u{626}\ ++ \u{6c7}\ ++ \u{626}\ ++ \u{6c6}\ ++ \u{626}\ ++ \u{6c8}\ ++ \u{626}\ ++ \u{6d0}\ ++ \u{626}\ ++ \u{649}\ ++ \u{6cc}\ ++ \u{626}\ ++ \u{62c}\ ++ \u{626}\ ++ \u{62d}\ ++ \u{626}\ ++ \u{645}\ ++ \u{626}\ ++ \u{64a}\ ++ \u{628}\ ++ \u{62c}\ ++ \u{628}\ ++ \u{62d}\ ++ \u{628}\ ++ \u{62e}\ ++ \u{628}\ ++ \u{645}\ ++ \u{628}\ ++ \u{649}\ ++ \u{628}\ ++ \u{64a}\ ++ \u{62a}\ ++ \u{62c}\ ++ \u{62a}\ ++ \u{62d}\ ++ \u{62a}\ ++ \u{62e}\ ++ \u{62a}\ ++ \u{645}\ ++ \u{62a}\ ++ \u{649}\ ++ \u{62a}\ ++ \u{64a}\ ++ \u{62b}\ ++ \u{62c}\ ++ \u{62b}\ ++ \u{645}\ ++ \u{62b}\ ++ \u{649}\ ++ \u{62b}\ ++ \u{64a}\ ++ \u{62c}\ ++ \u{62d}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{62c}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{62e}\ ++ \u{62c}\ ++ \u{62e}\ ++ \u{62d}\ ++ \u{62e}\ ++ \u{645}\ ++ \u{633}\ ++ \u{62c}\ ++ \u{633}\ ++ \u{62d}\ ++ \u{633}\ ++ \u{62e}\ ++ \u{633}\ ++ \u{645}\ ++ \u{635}\ ++ \u{62d}\ ++ \u{635}\ ++ \u{645}\ ++ \u{636}\ ++ \u{62c}\ ++ \u{636}\ ++ \u{62d}\ ++ 
\u{636}\ ++ \u{62e}\ ++ \u{636}\ ++ \u{645}\ ++ \u{637}\ ++ \u{62d}\ ++ \u{637}\ ++ \u{645}\ ++ \u{638}\ ++ \u{645}\ ++ \u{639}\ ++ \u{62c}\ ++ \u{639}\ ++ \u{645}\ ++ \u{63a}\ ++ \u{62c}\ ++ \u{63a}\ ++ \u{645}\ ++ \u{641}\ ++ \u{62c}\ ++ \u{641}\ ++ \u{62d}\ ++ \u{641}\ ++ \u{62e}\ ++ \u{641}\ ++ \u{645}\ ++ \u{641}\ ++ \u{649}\ ++ \u{641}\ ++ \u{64a}\ ++ \u{642}\ ++ \u{62d}\ ++ \u{642}\ ++ \u{645}\ ++ \u{642}\ ++ \u{649}\ ++ \u{642}\ ++ \u{64a}\ ++ \u{643}\ ++ \u{627}\ ++ \u{643}\ ++ \u{62c}\ ++ \u{643}\ ++ \u{62d}\ ++ \u{643}\ ++ \u{62e}\ ++ \u{643}\ ++ \u{644}\ ++ \u{643}\ ++ \u{645}\ ++ \u{643}\ ++ \u{649}\ ++ \u{643}\ ++ \u{64a}\ ++ \u{644}\ ++ \u{62c}\ ++ \u{644}\ ++ \u{62d}\ ++ \u{644}\ ++ \u{62e}\ ++ \u{644}\ ++ \u{645}\ ++ \u{644}\ ++ \u{649}\ ++ \u{644}\ ++ \u{64a}\ ++ \u{645}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{62e}\ ++ \u{645}\ ++ \u{645}\ ++ \u{645}\ ++ \u{649}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{646}\ ++ \u{62c}\ ++ \u{646}\ ++ \u{62d}\ ++ \u{646}\ ++ \u{62e}\ ++ \u{646}\ ++ \u{645}\ ++ \u{646}\ ++ \u{649}\ ++ \u{646}\ ++ \u{64a}\ ++ \u{647}\ ++ \u{62c}\ ++ \u{647}\ ++ \u{645}\ ++ \u{647}\ ++ \u{649}\ ++ \u{647}\ ++ \u{64a}\ ++ \u{64a}\ ++ \u{62c}\ ++ \u{64a}\ ++ \u{62d}\ ++ \u{64a}\ ++ \u{62e}\ ++ \u{64a}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{649}\ ++ \u{64a}\ ++ \u{64a}\ ++ \u{630}\ ++ \u{670}\ ++ \u{631}\ ++ \u{670}\ ++ \u{649}\ ++ \u{670}\ ++ \u{20}\ ++ \u{64c}\ ++ \u{651}\ ++ \u{20}\ ++ \u{64d}\ ++ \u{651}\ ++ \u{20}\ ++ \u{64e}\ ++ \u{651}\ ++ \u{20}\ ++ \u{64f}\ ++ \u{651}\ ++ \u{20}\ ++ \u{650}\ ++ \u{651}\ ++ \u{20}\ ++ \u{651}\ ++ \u{670}\ ++ \u{626}\ ++ \u{631}\ ++ \u{626}\ ++ \u{632}\ ++ \u{626}\ ++ \u{646}\ ++ \u{628}\ ++ \u{631}\ ++ \u{628}\ ++ \u{632}\ ++ \u{628}\ ++ \u{646}\ ++ \u{62a}\ ++ \u{631}\ ++ \u{62a}\ ++ \u{632}\ ++ \u{62a}\ ++ \u{646}\ ++ \u{62b}\ ++ \u{631}\ ++ \u{62b}\ ++ \u{632}\ ++ \u{62b}\ ++ \u{646}\ ++ \u{645}\ ++ \u{627}\ ++ \u{646}\ ++ \u{631}\ ++ \u{646}\ ++ \u{632}\ ++ \u{646}\ ++ \u{646}\ ++ \u{64a}\ ++ 
\u{631}\ ++ \u{64a}\ ++ \u{632}\ ++ \u{64a}\ ++ \u{646}\ ++ \u{626}\ ++ \u{62e}\ ++ \u{626}\ ++ \u{647}\ ++ \u{628}\ ++ \u{647}\ ++ \u{62a}\ ++ \u{647}\ ++ \u{635}\ ++ \u{62e}\ ++ \u{644}\ ++ \u{647}\ ++ \u{646}\ ++ \u{647}\ ++ \u{647}\ ++ \u{670}\ ++ \u{64a}\ ++ \u{647}\ ++ \u{62b}\ ++ \u{647}\ ++ \u{633}\ ++ \u{647}\ ++ \u{634}\ ++ \u{645}\ ++ \u{634}\ ++ \u{647}\ ++ \u{640}\ ++ \u{64e}\ ++ \u{651}\ ++ \u{640}\ ++ \u{64f}\ ++ \u{651}\ ++ \u{640}\ ++ \u{650}\ ++ \u{651}\ ++ \u{637}\ ++ \u{649}\ ++ \u{637}\ ++ \u{64a}\ ++ \u{639}\ ++ \u{649}\ ++ \u{639}\ ++ \u{64a}\ ++ \u{63a}\ ++ \u{649}\ ++ \u{63a}\ ++ \u{64a}\ ++ \u{633}\ ++ \u{649}\ ++ \u{633}\ ++ \u{64a}\ ++ \u{634}\ ++ \u{649}\ ++ \u{634}\ ++ \u{64a}\ ++ \u{62d}\ ++ \u{649}\ ++ \u{62d}\ ++ \u{64a}\ ++ \u{62c}\ ++ \u{649}\ ++ \u{62c}\ ++ \u{64a}\ ++ \u{62e}\ ++ \u{649}\ ++ \u{62e}\ ++ \u{64a}\ ++ \u{635}\ ++ \u{649}\ ++ \u{635}\ ++ \u{64a}\ ++ \u{636}\ ++ \u{649}\ ++ \u{636}\ ++ \u{64a}\ ++ \u{634}\ ++ \u{62c}\ ++ \u{634}\ ++ \u{62d}\ ++ \u{634}\ ++ \u{62e}\ ++ \u{634}\ ++ \u{631}\ ++ \u{633}\ ++ \u{631}\ ++ \u{635}\ ++ \u{631}\ ++ \u{636}\ ++ \u{631}\ ++ \u{627}\ ++ \u{64b}\ ++ \u{62a}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{62a}\ ++ \u{62d}\ ++ \u{62c}\ ++ \u{62a}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{62a}\ ++ \u{62e}\ ++ \u{645}\ ++ \u{62a}\ ++ \u{645}\ ++ \u{62c}\ ++ \u{62a}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{62a}\ ++ \u{645}\ ++ \u{62e}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{649}\ ++ \u{633}\ ++ \u{62d}\ ++ \u{62c}\ ++ \u{633}\ ++ \u{62c}\ ++ \u{62d}\ ++ \u{633}\ ++ \u{62c}\ ++ \u{649}\ ++ \u{633}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{633}\ ++ \u{645}\ ++ \u{62c}\ ++ \u{633}\ ++ \u{645}\ ++ \u{645}\ ++ \u{635}\ ++ \u{62d}\ ++ \u{62d}\ ++ \u{635}\ ++ \u{645}\ ++ \u{645}\ ++ \u{634}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{634}\ ++ \u{62c}\ ++ \u{64a}\ ++ \u{634}\ ++ \u{645}\ ++ \u{62e}\ ++ \u{634}\ ++ \u{645}\ ++ \u{645}\ ++ \u{636}\ ++ \u{62d}\ ++ \u{649}\ ++ \u{636}\ ++ 
\u{62e}\ ++ \u{645}\ ++ \u{637}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{637}\ ++ \u{645}\ ++ \u{645}\ ++ \u{637}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{639}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{639}\ ++ \u{645}\ ++ \u{645}\ ++ \u{639}\ ++ \u{645}\ ++ \u{649}\ ++ \u{63a}\ ++ \u{645}\ ++ \u{645}\ ++ \u{63a}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{63a}\ ++ \u{645}\ ++ \u{649}\ ++ \u{641}\ ++ \u{62e}\ ++ \u{645}\ ++ \u{642}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{642}\ ++ \u{645}\ ++ \u{645}\ ++ \u{644}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{644}\ ++ \u{62d}\ ++ \u{64a}\ ++ \u{644}\ ++ \u{62d}\ ++ \u{649}\ ++ \u{644}\ ++ \u{62c}\ ++ \u{62c}\ ++ \u{644}\ ++ \u{62e}\ ++ \u{645}\ ++ \u{644}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{64a}\ ++ \u{645}\ ++ \u{62c}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{645}\ ++ \u{62e}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{62e}\ ++ \u{645}\ ++ \u{645}\ ++ \u{62c}\ ++ \u{62e}\ ++ \u{647}\ ++ \u{645}\ ++ \u{62c}\ ++ \u{647}\ ++ \u{645}\ ++ \u{645}\ ++ \u{646}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{646}\ ++ \u{62d}\ ++ \u{649}\ ++ \u{646}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{646}\ ++ \u{62c}\ ++ \u{649}\ ++ \u{646}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{646}\ ++ \u{645}\ ++ \u{649}\ ++ \u{64a}\ ++ \u{645}\ ++ \u{645}\ ++ \u{628}\ ++ \u{62e}\ ++ \u{64a}\ ++ \u{62a}\ ++ \u{62c}\ ++ \u{64a}\ ++ \u{62a}\ ++ \u{62c}\ ++ \u{649}\ ++ \u{62a}\ ++ \u{62e}\ ++ \u{64a}\ ++ \u{62a}\ ++ \u{62e}\ ++ \u{649}\ ++ \u{62a}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{62a}\ ++ \u{645}\ ++ \u{649}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{62c}\ ++ \u{62d}\ ++ \u{649}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{649}\ ++ \u{633}\ ++ \u{62e}\ ++ \u{649}\ ++ \u{635}\ ++ \u{62d}\ ++ \u{64a}\ ++ \u{634}\ ++ \u{62d}\ ++ \u{64a}\ ++ \u{636}\ ++ \u{62d}\ ++ \u{64a}\ ++ \u{644}\ ++ \u{62c}\ ++ \u{64a}\ ++ \u{644}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{64a}\ ++ \u{62d}\ ++ \u{64a}\ ++ \u{64a}\ ++ \u{62c}\ ++ \u{64a}\ ++ \u{64a}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{645}\ ++ \u{645}\ ++ 
\u{64a}\ ++ \u{642}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{646}\ ++ \u{62d}\ ++ \u{64a}\ ++ \u{639}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{643}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{646}\ ++ \u{62c}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{62e}\ ++ \u{64a}\ ++ \u{644}\ ++ \u{62c}\ ++ \u{645}\ ++ \u{643}\ ++ \u{645}\ ++ \u{645}\ ++ \u{62c}\ ++ \u{62d}\ ++ \u{64a}\ ++ \u{62d}\ ++ \u{62c}\ ++ \u{64a}\ ++ \u{645}\ ++ \u{62c}\ ++ \u{64a}\ ++ \u{641}\ ++ \u{645}\ ++ \u{64a}\ ++ \u{628}\ ++ \u{62d}\ ++ \u{64a}\ ++ \u{633}\ ++ \u{62e}\ ++ \u{64a}\ ++ \u{646}\ ++ \u{62c}\ ++ \u{64a}\ ++ \u{635}\ ++ \u{644}\ ++ \u{6d2}\ ++ \u{642}\ ++ \u{644}\ ++ \u{6d2}\ ++ \u{627}\ ++ \u{644}\ ++ \u{644}\ ++ \u{647}\ ++ \u{627}\ ++ \u{643}\ ++ \u{628}\ ++ \u{631}\ ++ \u{645}\ ++ \u{62d}\ ++ \u{645}\ ++ \u{62f}\ ++ \u{635}\ ++ \u{644}\ ++ \u{639}\ ++ \u{645}\ ++ \u{631}\ ++ \u{633}\ ++ \u{648}\ ++ \u{644}\ ++ \u{639}\ ++ \u{644}\ ++ \u{64a}\ ++ \u{647}\ ++ \u{648}\ ++ \u{633}\ ++ \u{644}\ ++ \u{645}\ ++ \u{635}\ ++ \u{644}\ ++ \u{649}\ ++ \u{635}\ ++ \u{644}\ ++ \u{649}\ ++ \u{20}\ ++ \u{627}\ ++ \u{644}\ ++ \u{644}\ ++ \u{647}\ ++ \u{20}\ ++ \u{639}\ ++ \u{644}\ ++ \u{64a}\ ++ \u{647}\ ++ \u{20}\ ++ \u{648}\ ++ \u{633}\ ++ \u{644}\ ++ \u{645}\ ++ \u{62c}\ ++ \u{644}\ ++ \u{20}\ ++ \u{62c}\ ++ \u{644}\ ++ \u{627}\ ++ \u{644}\ ++ \u{647}\ ++ \u{631}\ ++ \u{6cc}\ ++ \u{627}\ ++ \u{644}\ ++ \u{2c}\ ++ \u{3001}\ ++ \u{3a}\ ++ \u{21}\ ++ \u{3f}\ ++ \u{3016}\ ++ \u{3017}\ ++ \u{2014}\ ++ \u{2013}\ ++ \u{5f}\ ++ \u{7b}\ ++ \u{7d}\ ++ \u{3014}\ ++ \u{3015}\ ++ \u{3010}\ ++ \u{3011}\ ++ \u{300a}\ ++ \u{300b}\ ++ \u{300c}\ ++ \u{300d}\ ++ \u{300e}\ ++ \u{300f}\ ++ \u{5b}\ ++ \u{5d}\ ++ \u{23}\ ++ \u{26}\ ++ \u{2a}\ ++ \u{2d}\ ++ \u{3c}\ ++ \u{3e}\ ++ \u{5c}\ ++ \u{24}\ ++ \u{25}\ ++ \u{40}\ ++ \u{20}\ ++ \u{64b}\ ++ \u{640}\ ++ \u{64b}\ ++ \u{20}\ ++ \u{64c}\ ++ \u{20}\ ++ \u{64d}\ ++ \u{20}\ ++ \u{64e}\ ++ \u{640}\ ++ \u{64e}\ ++ \u{20}\ ++ \u{64f}\ ++ \u{640}\ ++ \u{64f}\ ++ \u{20}\ ++ \u{650}\ ++ \u{640}\ ++ \u{650}\ ++ \u{20}\ 
++ \u{651}\ ++ \u{640}\ ++ \u{651}\ ++ \u{20}\ ++ \u{652}\ ++ \u{640}\ ++ \u{652}\ ++ \u{621}\ ++ \u{622}\ ++ \u{623}\ ++ \u{624}\ ++ \u{625}\ ++ \u{626}\ ++ \u{627}\ ++ \u{628}\ ++ \u{629}\ ++ \u{62a}\ ++ \u{62b}\ ++ \u{62c}\ ++ \u{62d}\ ++ \u{62e}\ ++ \u{62f}\ ++ \u{630}\ ++ \u{631}\ ++ \u{632}\ ++ \u{633}\ ++ \u{634}\ ++ \u{635}\ ++ \u{636}\ ++ \u{637}\ ++ \u{638}\ ++ \u{639}\ ++ \u{63a}\ ++ \u{641}\ ++ \u{642}\ ++ \u{643}\ ++ \u{644}\ ++ \u{645}\ ++ \u{646}\ ++ \u{647}\ ++ \u{648}\ ++ \u{64a}\ ++ \u{644}\ ++ \u{622}\ ++ \u{644}\ ++ \u{623}\ ++ \u{644}\ ++ \u{625}\ ++ \u{644}\ ++ \u{627}\ ++ \u{22}\ ++ \u{27}\ ++ \u{2f}\ ++ \u{5e}\ ++ \u{7c}\ ++ \u{7e}\ ++ \u{2985}\ ++ \u{2986}\ ++ \u{30fb}\ ++ \u{30a1}\ ++ \u{30a3}\ ++ \u{30a5}\ ++ \u{30a7}\ ++ \u{30a9}\ ++ \u{30e3}\ ++ \u{30e5}\ ++ \u{30e7}\ ++ \u{30c3}\ ++ \u{30fc}\ ++ \u{30f3}\ ++ \u{3099}\ ++ \u{309a}\ ++ \u{a2}\ ++ \u{a3}\ ++ \u{ac}\ ++ \u{a6}\ ++ \u{a5}\ ++ \u{20a9}\ ++ \u{2502}\ ++ \u{2190}\ ++ \u{2191}\ ++ \u{2192}\ ++ \u{2193}\ ++ \u{25a0}\ ++ \u{25cb}\ ++ \u{10428}\ ++ \u{10429}\ ++ \u{1042a}\ ++ \u{1042b}\ ++ \u{1042c}\ ++ \u{1042d}\ ++ \u{1042e}\ ++ \u{1042f}\ ++ \u{10430}\ ++ \u{10431}\ ++ \u{10432}\ ++ \u{10433}\ ++ \u{10434}\ ++ \u{10435}\ ++ \u{10436}\ ++ \u{10437}\ ++ \u{10438}\ ++ \u{10439}\ ++ \u{1043a}\ ++ \u{1043b}\ ++ \u{1043c}\ ++ \u{1043d}\ ++ \u{1043e}\ ++ \u{1043f}\ ++ \u{10440}\ ++ \u{10441}\ ++ \u{10442}\ ++ \u{10443}\ ++ \u{10444}\ ++ \u{10445}\ ++ \u{10446}\ ++ \u{10447}\ ++ \u{10448}\ ++ \u{10449}\ ++ \u{1044a}\ ++ \u{1044b}\ ++ \u{1044c}\ ++ \u{1044d}\ ++ \u{1044e}\ ++ \u{1044f}\ ++ \u{104d8}\ ++ \u{104d9}\ ++ \u{104da}\ ++ \u{104db}\ ++ \u{104dc}\ ++ \u{104dd}\ ++ \u{104de}\ ++ \u{104df}\ ++ \u{104e0}\ ++ \u{104e1}\ ++ \u{104e2}\ ++ \u{104e3}\ ++ \u{104e4}\ ++ \u{104e5}\ ++ \u{104e6}\ ++ \u{104e7}\ ++ \u{104e8}\ ++ \u{104e9}\ ++ \u{104ea}\ ++ \u{104eb}\ ++ \u{104ec}\ ++ \u{104ed}\ ++ \u{104ee}\ ++ \u{104ef}\ ++ \u{104f0}\ ++ \u{104f1}\ ++ \u{104f2}\ ++ \u{104f3}\ ++ \u{104f4}\ 
++ \u{104f5}\ ++ \u{104f6}\ ++ \u{104f7}\ ++ \u{104f8}\ ++ \u{104f9}\ ++ \u{104fa}\ ++ \u{104fb}\ ++ \u{10cc0}\ ++ \u{10cc1}\ ++ \u{10cc2}\ ++ \u{10cc3}\ ++ \u{10cc4}\ ++ \u{10cc5}\ ++ \u{10cc6}\ ++ \u{10cc7}\ ++ \u{10cc8}\ ++ \u{10cc9}\ ++ \u{10cca}\ ++ \u{10ccb}\ ++ \u{10ccc}\ ++ \u{10ccd}\ ++ \u{10cce}\ ++ \u{10ccf}\ ++ \u{10cd0}\ ++ \u{10cd1}\ ++ \u{10cd2}\ ++ \u{10cd3}\ ++ \u{10cd4}\ ++ \u{10cd5}\ ++ \u{10cd6}\ ++ \u{10cd7}\ ++ \u{10cd8}\ ++ \u{10cd9}\ ++ \u{10cda}\ ++ \u{10cdb}\ ++ \u{10cdc}\ ++ \u{10cdd}\ ++ \u{10cde}\ ++ \u{10cdf}\ ++ \u{10ce0}\ ++ \u{10ce1}\ ++ \u{10ce2}\ ++ \u{10ce3}\ ++ \u{10ce4}\ ++ \u{10ce5}\ ++ \u{10ce6}\ ++ \u{10ce7}\ ++ \u{10ce8}\ ++ \u{10ce9}\ ++ \u{10cea}\ ++ \u{10ceb}\ ++ \u{10cec}\ ++ \u{10ced}\ ++ \u{10cee}\ ++ \u{10cef}\ ++ \u{10cf0}\ ++ \u{10cf1}\ ++ \u{10cf2}\ ++ \u{118c0}\ ++ \u{118c1}\ ++ \u{118c2}\ ++ \u{118c3}\ ++ \u{118c4}\ ++ \u{118c5}\ ++ \u{118c6}\ ++ \u{118c7}\ ++ \u{118c8}\ ++ \u{118c9}\ ++ \u{118ca}\ ++ \u{118cb}\ ++ \u{118cc}\ ++ \u{118cd}\ ++ \u{118ce}\ ++ \u{118cf}\ ++ \u{118d0}\ ++ \u{118d1}\ ++ \u{118d2}\ ++ \u{118d3}\ ++ \u{118d4}\ ++ \u{118d5}\ ++ \u{118d6}\ ++ \u{118d7}\ ++ \u{118d8}\ ++ \u{118d9}\ ++ \u{118da}\ ++ \u{118db}\ ++ \u{118dc}\ ++ \u{118dd}\ ++ \u{118de}\ ++ \u{118df}\ ++ \u{1d157}\ ++ \u{1d165}\ ++ \u{1d158}\ ++ \u{1d165}\ ++ \u{1d158}\ ++ \u{1d165}\ ++ \u{1d16e}\ ++ \u{1d158}\ ++ \u{1d165}\ ++ \u{1d16f}\ ++ \u{1d158}\ ++ \u{1d165}\ ++ \u{1d170}\ ++ \u{1d158}\ ++ \u{1d165}\ ++ \u{1d171}\ ++ \u{1d158}\ ++ \u{1d165}\ ++ \u{1d172}\ ++ \u{1d1b9}\ ++ \u{1d165}\ ++ \u{1d1ba}\ ++ \u{1d165}\ ++ \u{1d1b9}\ ++ \u{1d165}\ ++ \u{1d16e}\ ++ \u{1d1ba}\ ++ \u{1d165}\ ++ \u{1d16e}\ ++ \u{1d1b9}\ ++ \u{1d165}\ ++ \u{1d16f}\ ++ \u{1d1ba}\ ++ \u{1d165}\ ++ \u{1d16f}\ ++ \u{131}\ ++ \u{237}\ ++ \u{2207}\ ++ \u{2202}\ ++ \u{1e922}\ ++ \u{1e923}\ ++ \u{1e924}\ ++ \u{1e925}\ ++ \u{1e926}\ ++ \u{1e927}\ ++ \u{1e928}\ ++ \u{1e929}\ ++ \u{1e92a}\ ++ \u{1e92b}\ ++ \u{1e92c}\ ++ \u{1e92d}\ ++ \u{1e92e}\ ++ \u{1e92f}\ ++ 
\u{1e930}\ ++ \u{1e931}\ ++ \u{1e932}\ ++ \u{1e933}\ ++ \u{1e934}\ ++ \u{1e935}\ ++ \u{1e936}\ ++ \u{1e937}\ ++ \u{1e938}\ ++ \u{1e939}\ ++ \u{1e93a}\ ++ \u{1e93b}\ ++ \u{1e93c}\ ++ \u{1e93d}\ ++ \u{1e93e}\ ++ \u{1e93f}\ ++ \u{1e940}\ ++ \u{1e941}\ ++ \u{1e942}\ ++ \u{1e943}\ ++ \u{66e}\ ++ \u{6a1}\ ++ \u{66f}\ ++ \u{30}\ ++ \u{2c}\ ++ \u{31}\ ++ \u{2c}\ ++ \u{32}\ ++ \u{2c}\ ++ \u{33}\ ++ \u{2c}\ ++ \u{34}\ ++ \u{2c}\ ++ \u{35}\ ++ \u{2c}\ ++ \u{36}\ ++ \u{2c}\ ++ \u{37}\ ++ \u{2c}\ ++ \u{38}\ ++ \u{2c}\ ++ \u{39}\ ++ \u{2c}\ ++ \u{3014}\ ++ \u{73}\ ++ \u{3015}\ ++ \u{77}\ ++ \u{7a}\ ++ \u{68}\ ++ \u{76}\ ++ \u{73}\ ++ \u{64}\ ++ \u{70}\ ++ \u{70}\ ++ \u{76}\ ++ \u{77}\ ++ \u{63}\ ++ \u{6d}\ ++ \u{63}\ ++ \u{6d}\ ++ \u{64}\ ++ \u{64}\ ++ \u{6a}\ ++ \u{307b}\ ++ \u{304b}\ ++ \u{30b3}\ ++ \u{30b3}\ ++ \u{5b57}\ ++ \u{53cc}\ ++ \u{30c7}\ ++ \u{591a}\ ++ \u{89e3}\ ++ \u{4ea4}\ ++ \u{6620}\ ++ \u{7121}\ ++ \u{524d}\ ++ \u{5f8c}\ ++ \u{518d}\ ++ \u{65b0}\ ++ \u{521d}\ ++ \u{7d42}\ ++ \u{8ca9}\ ++ \u{58f0}\ ++ \u{5439}\ ++ \u{6f14}\ ++ \u{6295}\ ++ \u{6355}\ ++ \u{904a}\ ++ \u{6307}\ ++ \u{6253}\ ++ \u{7981}\ ++ \u{7a7a}\ ++ \u{5408}\ ++ \u{6e80}\ ++ \u{7533}\ ++ \u{5272}\ ++ \u{55b6}\ ++ \u{914d}\ ++ \u{3014}\ ++ \u{672c}\ ++ \u{3015}\ ++ \u{3014}\ ++ \u{4e09}\ ++ \u{3015}\ ++ \u{3014}\ ++ \u{4e8c}\ ++ \u{3015}\ ++ \u{3014}\ ++ \u{5b89}\ ++ \u{3015}\ ++ \u{3014}\ ++ \u{70b9}\ ++ \u{3015}\ ++ \u{3014}\ ++ \u{6253}\ ++ \u{3015}\ ++ \u{3014}\ ++ \u{76d7}\ ++ \u{3015}\ ++ \u{3014}\ ++ \u{52dd}\ ++ \u{3015}\ ++ \u{3014}\ ++ \u{6557}\ ++ \u{3015}\ ++ \u{5f97}\ ++ \u{53ef}\ ++ \u{4e3d}\ ++ \u{4e38}\ ++ \u{4e41}\ ++ \u{20122}\ ++ \u{4f60}\ ++ \u{4fbb}\ ++ \u{5002}\ ++ \u{507a}\ ++ \u{5099}\ ++ \u{50cf}\ ++ \u{349e}\ ++ \u{2063a}\ ++ \u{5154}\ ++ \u{5164}\ ++ \u{5177}\ ++ \u{2051c}\ ++ \u{34b9}\ ++ \u{5167}\ ++ \u{2054b}\ ++ \u{5197}\ ++ \u{51a4}\ ++ \u{4ecc}\ ++ \u{51ac}\ ++ \u{291df}\ ++ \u{5203}\ ++ \u{34df}\ ++ \u{523b}\ ++ \u{5246}\ ++ \u{5277}\ ++ \u{3515}\ ++ \u{5305}\ ++ 
\u{5306}\ ++ \u{5349}\ ++ \u{535a}\ ++ \u{5373}\ ++ \u{537d}\ ++ \u{537f}\ ++ \u{20a2c}\ ++ \u{7070}\ ++ \u{53ca}\ ++ \u{53df}\ ++ \u{20b63}\ ++ \u{53eb}\ ++ \u{53f1}\ ++ \u{5406}\ ++ \u{549e}\ ++ \u{5438}\ ++ \u{5448}\ ++ \u{5468}\ ++ \u{54a2}\ ++ \u{54f6}\ ++ \u{5510}\ ++ \u{5553}\ ++ \u{5563}\ ++ \u{5584}\ ++ \u{55ab}\ ++ \u{55b3}\ ++ \u{55c2}\ ++ \u{5716}\ ++ \u{5717}\ ++ \u{5651}\ ++ \u{5674}\ ++ \u{58ee}\ ++ \u{57ce}\ ++ \u{57f4}\ ++ \u{580d}\ ++ \u{578b}\ ++ \u{5832}\ ++ \u{5831}\ ++ \u{58ac}\ ++ \u{214e4}\ ++ \u{58f2}\ ++ \u{58f7}\ ++ \u{5906}\ ++ \u{5922}\ ++ \u{5962}\ ++ \u{216a8}\ ++ \u{216ea}\ ++ \u{59ec}\ ++ \u{5a1b}\ ++ \u{5a27}\ ++ \u{59d8}\ ++ \u{5a66}\ ++ \u{36ee}\ ++ \u{5b08}\ ++ \u{5b3e}\ ++ \u{219c8}\ ++ \u{5bc3}\ ++ \u{5bd8}\ ++ \u{5bf3}\ ++ \u{21b18}\ ++ \u{5bff}\ ++ \u{5c06}\ ++ \u{3781}\ ++ \u{5c60}\ ++ \u{5cc0}\ ++ \u{5c8d}\ ++ \u{21de4}\ ++ \u{5d43}\ ++ \u{21de6}\ ++ \u{5d6e}\ ++ \u{5d6b}\ ++ \u{5d7c}\ ++ \u{5de1}\ ++ \u{5de2}\ ++ \u{382f}\ ++ \u{5dfd}\ ++ \u{5e28}\ ++ \u{5e3d}\ ++ \u{5e69}\ ++ \u{3862}\ ++ \u{22183}\ ++ \u{387c}\ ++ \u{5eb0}\ ++ \u{5eb3}\ ++ \u{5eb6}\ ++ \u{2a392}\ ++ \u{22331}\ ++ \u{8201}\ ++ \u{5f22}\ ++ \u{38c7}\ ++ \u{232b8}\ ++ \u{261da}\ ++ \u{5f62}\ ++ \u{5f6b}\ ++ \u{38e3}\ ++ \u{5f9a}\ ++ \u{5fcd}\ ++ \u{5fd7}\ ++ \u{5ff9}\ ++ \u{6081}\ ++ \u{393a}\ ++ \u{391c}\ ++ \u{226d4}\ ++ \u{60c7}\ ++ \u{6148}\ ++ \u{614c}\ ++ \u{617a}\ ++ \u{61b2}\ ++ \u{61a4}\ ++ \u{61af}\ ++ \u{61de}\ ++ \u{6210}\ ++ \u{621b}\ ++ \u{625d}\ ++ \u{62b1}\ ++ \u{62d4}\ ++ \u{6350}\ ++ \u{22b0c}\ ++ \u{633d}\ ++ \u{62fc}\ ++ \u{6368}\ ++ \u{6383}\ ++ \u{63e4}\ ++ \u{22bf1}\ ++ \u{6422}\ ++ \u{63c5}\ ++ \u{63a9}\ ++ \u{3a2e}\ ++ \u{6469}\ ++ \u{647e}\ ++ \u{649d}\ ++ \u{6477}\ ++ \u{3a6c}\ ++ \u{656c}\ ++ \u{2300a}\ ++ \u{65e3}\ ++ \u{66f8}\ ++ \u{6649}\ ++ \u{3b19}\ ++ \u{3b08}\ ++ \u{3ae4}\ ++ \u{5192}\ ++ \u{5195}\ ++ \u{6700}\ ++ \u{669c}\ ++ \u{80ad}\ ++ \u{43d9}\ ++ \u{6721}\ ++ \u{675e}\ ++ \u{6753}\ ++ \u{233c3}\ ++ \u{3b49}\ ++ 
\u{67fa}\ ++ \u{6785}\ ++ \u{6852}\ ++ \u{2346d}\ ++ \u{688e}\ ++ \u{681f}\ ++ \u{6914}\ ++ \u{6942}\ ++ \u{69a3}\ ++ \u{69ea}\ ++ \u{6aa8}\ ++ \u{236a3}\ ++ \u{6adb}\ ++ \u{3c18}\ ++ \u{6b21}\ ++ \u{238a7}\ ++ \u{6b54}\ ++ \u{3c4e}\ ++ \u{6b72}\ ++ \u{6b9f}\ ++ \u{6bbb}\ ++ \u{23a8d}\ ++ \u{21d0b}\ ++ \u{23afa}\ ++ \u{6c4e}\ ++ \u{23cbc}\ ++ \u{6cbf}\ ++ \u{6ccd}\ ++ \u{6c67}\ ++ \u{6d16}\ ++ \u{6d3e}\ ++ \u{6d69}\ ++ \u{6d78}\ ++ \u{6d85}\ ++ \u{23d1e}\ ++ \u{6d34}\ ++ \u{6e2f}\ ++ \u{6e6e}\ ++ \u{3d33}\ ++ \u{6ec7}\ ++ \u{23ed1}\ ++ \u{6df9}\ ++ \u{6f6e}\ ++ \u{23f5e}\ ++ \u{23f8e}\ ++ \u{6fc6}\ ++ \u{7039}\ ++ \u{701b}\ ++ \u{3d96}\ ++ \u{704a}\ ++ \u{707d}\ ++ \u{7077}\ ++ \u{70ad}\ ++ \u{20525}\ ++ \u{7145}\ ++ \u{24263}\ ++ \u{719c}\ ++ \u{7228}\ ++ \u{7250}\ ++ \u{24608}\ ++ \u{7280}\ ++ \u{7295}\ ++ \u{24735}\ ++ \u{24814}\ ++ \u{737a}\ ++ \u{738b}\ ++ \u{3eac}\ ++ \u{73a5}\ ++ \u{3eb8}\ ++ \u{7447}\ ++ \u{745c}\ ++ \u{7485}\ ++ \u{74ca}\ ++ \u{3f1b}\ ++ \u{7524}\ ++ \u{24c36}\ ++ \u{753e}\ ++ \u{24c92}\ ++ \u{2219f}\ ++ \u{7610}\ ++ \u{24fa1}\ ++ \u{24fb8}\ ++ \u{25044}\ ++ \u{3ffc}\ ++ \u{4008}\ ++ \u{250f3}\ ++ \u{250f2}\ ++ \u{25119}\ ++ \u{25133}\ ++ \u{771e}\ ++ \u{771f}\ ++ \u{778b}\ ++ \u{4046}\ ++ \u{4096}\ ++ \u{2541d}\ ++ \u{784e}\ ++ \u{40e3}\ ++ \u{25626}\ ++ \u{2569a}\ ++ \u{256c5}\ ++ \u{79eb}\ ++ \u{412f}\ ++ \u{7a4a}\ ++ \u{7a4f}\ ++ \u{2597c}\ ++ \u{25aa7}\ ++ \u{4202}\ ++ \u{25bab}\ ++ \u{7bc6}\ ++ \u{7bc9}\ ++ \u{4227}\ ++ \u{25c80}\ ++ \u{7cd2}\ ++ \u{42a0}\ ++ \u{7ce8}\ ++ \u{7ce3}\ ++ \u{7d00}\ ++ \u{25f86}\ ++ \u{7d63}\ ++ \u{4301}\ ++ \u{7dc7}\ ++ \u{7e02}\ ++ \u{7e45}\ ++ \u{4334}\ ++ \u{26228}\ ++ \u{26247}\ ++ \u{4359}\ ++ \u{262d9}\ ++ \u{7f7a}\ ++ \u{2633e}\ ++ \u{7f95}\ ++ \u{7ffa}\ ++ \u{264da}\ ++ \u{26523}\ ++ \u{8060}\ ++ \u{265a8}\ ++ \u{8070}\ ++ \u{2335f}\ ++ \u{43d5}\ ++ \u{80b2}\ ++ \u{8103}\ ++ \u{440b}\ ++ \u{813e}\ ++ \u{5ab5}\ ++ \u{267a7}\ ++ \u{267b5}\ ++ \u{23393}\ ++ \u{2339c}\ ++ \u{8204}\ ++ \u{8f9e}\ ++ 
\u{446b}\ ++ \u{8291}\ ++ \u{828b}\ ++ \u{829d}\ ++ \u{52b3}\ ++ \u{82b1}\ ++ \u{82b3}\ ++ \u{82bd}\ ++ \u{82e6}\ ++ \u{26b3c}\ ++ \u{831d}\ ++ \u{8363}\ ++ \u{83ad}\ ++ \u{8323}\ ++ \u{83bd}\ ++ \u{83e7}\ ++ \u{8353}\ ++ \u{83ca}\ ++ \u{83cc}\ ++ \u{83dc}\ ++ \u{26c36}\ ++ \u{26d6b}\ ++ \u{26cd5}\ ++ \u{452b}\ ++ \u{84f1}\ ++ \u{84f3}\ ++ \u{8516}\ ++ \u{273ca}\ ++ \u{8564}\ ++ \u{26f2c}\ ++ \u{455d}\ ++ \u{4561}\ ++ \u{26fb1}\ ++ \u{270d2}\ ++ \u{456b}\ ++ \u{8650}\ ++ \u{8667}\ ++ \u{8669}\ ++ \u{86a9}\ ++ \u{8688}\ ++ \u{870e}\ ++ \u{86e2}\ ++ \u{8728}\ ++ \u{876b}\ ++ \u{8786}\ ++ \u{87e1}\ ++ \u{8801}\ ++ \u{45f9}\ ++ \u{8860}\ ++ \u{27667}\ ++ \u{88d7}\ ++ \u{88de}\ ++ \u{4635}\ ++ \u{88fa}\ ++ \u{34bb}\ ++ \u{278ae}\ ++ \u{27966}\ ++ \u{46be}\ ++ \u{46c7}\ ++ \u{8aa0}\ ++ \u{27ca8}\ ++ \u{8cab}\ ++ \u{8cc1}\ ++ \u{8d1b}\ ++ \u{8d77}\ ++ \u{27f2f}\ ++ \u{20804}\ ++ \u{8dcb}\ ++ \u{8dbc}\ ++ \u{8df0}\ ++ \u{208de}\ ++ \u{8ed4}\ ++ \u{285d2}\ ++ \u{285ed}\ ++ \u{9094}\ ++ \u{90f1}\ ++ \u{9111}\ ++ \u{2872e}\ ++ \u{911b}\ ++ \u{9238}\ ++ \u{92d7}\ ++ \u{92d8}\ ++ \u{927c}\ ++ \u{93f9}\ ++ \u{9415}\ ++ \u{28bfa}\ ++ \u{958b}\ ++ \u{4995}\ ++ \u{95b7}\ ++ \u{28d77}\ ++ \u{49e6}\ ++ \u{96c3}\ ++ \u{5db2}\ ++ \u{9723}\ ++ \u{29145}\ ++ \u{2921a}\ ++ \u{4a6e}\ ++ \u{4a76}\ ++ \u{97e0}\ ++ \u{2940a}\ ++ \u{4ab2}\ ++ \u{29496}\ ++ \u{9829}\ ++ \u{295b6}\ ++ \u{98e2}\ ++ \u{4b33}\ ++ \u{9929}\ ++ \u{99a7}\ ++ \u{99c2}\ ++ \u{99fe}\ ++ \u{4bce}\ ++ \u{29b30}\ ++ \u{9c40}\ ++ \u{9cfd}\ ++ \u{4cce}\ ++ \u{4ced}\ ++ \u{9d67}\ ++ \u{2a0ce}\ ++ \u{4cf8}\ ++ \u{2a105}\ ++ \u{2a20e}\ ++ \u{2a291}\ ++ \u{4d56}\ ++ \u{9efe}\ ++ \u{9f05}\ ++ \u{9f0f}\ ++ \u{9f16}\ ++ \u{2a600}"; diff --cc vendor/idna-0.1.4/tests/IdnaTest.txt index 000000000,000000000..123a1f061 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/tests/IdnaTest.txt @@@ -1,0 -1,0 +1,7848 @@@ ++# IdnaTest.txt ++# Date: 2017-06-02, 14:19:52 GMT ++# © 2017 Unicode®, Inc. 
++# Unicode and the Unicode Logo are registered trademarks of Unicode, Inc. in the U.S. and other countries. ++# For terms of use, see http://www.unicode.org/terms_of_use.html ++# ++# Contains test cases for verifying UTS46 conformance. For more information, ++# see http://www.unicode.org/reports/tr46/ ++# ++# FORMAT: ++# ++# This file is in UTF8, with certain characters escaped using the \uXXXX or \x{XXXX} ++# convention where they could otherwise have a confusing display. ++# These characters include: ++# ++# - General Categories C, Z, and M ++# - Default ignorable characters ++# - Bidi categories R, AL, AN ++# ++# Columns (c1, c2,...) are separated by semicolons. ++# Leading and trailing spaces and tabs in each column are ignored. ++# Comments are indicated with hash marks. ++# ++# Column 1: type - T for transitional, N for nontransitional, B for both ++# Column 2: source - The source string to be tested ++# Column 3: toUnicode - The result of applying toUnicode to the source, using nontransitional. ++# A blank value means the same as the source value; a value in [...] is a set of error codes. ++# Column 4: toASCII - The result of applying toASCII to the source, using the specified type: T, N, or B. ++# A blank value means the same as the toUnicode value; a value in [...] is a set of error codes. ++# Column 5: idna2008 - NV8 is only present if the status is valid but the character is excluded by IDNA2008 ++# from all domain names for all versions of Unicode. ++# XV8 is present when the character is excluded by IDNA2008 for the current version of Unicode. ++# These are informative values only. ++# ++# If the value of toUnicode is the same as source, the column will be blank. 
++# The line comments currently show visible characters that have been escaped ++# (after removing default-ignorables and controls, except for whitespace) ++# ++# The test is performed with the following flag settings: ++# ++# VerifyDnsLength: true ++# CheckHyphens: true ++# CheckBidi: true ++# CheckJoiners: true ++# UseSTD3ASCIIRules: true ++# ++# An error in toUnicode or toASCII is indicated by a value in square brackets, such as "[B5 B6]". ++# In such a case, the contents is a list of error codes based on the step numbers in UTS46 and IDNA2008, ++# with the following formats: ++# ++# Pn for Section 4 Processing step n ++# Vn for 4.1 Validity Criteria step n ++# An for 4.2 ToASCII step n ++# Bn for Bidi (in IDNA2008) ++# Cn for ContextJ (in IDNA2008) ++# ++# However, these particular error codes are only informative; ++# the important feature is whether or not there is an error. ++# ++# CONFORMANCE: ++# ++# To test for conformance to UTS46, an implementation must first perform the toUnicode operation ++# on the source string, then the toASCII operation (with the indicated type) on the source string. ++# Implementations may be more strict than UTS46; thus they may have errors where the file indicates results. ++# In particular, an implementation conformant to IDNA2008 would disallow the input for lines marked with NV8. ++# ++# Moreover, the error codes in the file are informative; implementations need only record that there is an error: ++# they need not reproduce those codes. Thus to then verify conformance for the toASCII and toUnicode columns: ++# ++# - If the file indicates an error, the implementation must also have an error. ++# - If the file does not indicate an error, then the implementation must either have an error, ++# or must have a matching result. 
++# ++# ==================================================================================================== ++B; fass.de; ; ++T; faß.de; ; fass.de ++N; faß.de; ; xn--fa-hia.de ++T; Faß.de; faß.de; fass.de ++N; Faß.de; faß.de; xn--fa-hia.de ++B; xn--fa-hia.de; faß.de; xn--fa-hia.de ++ ++# BIDI TESTS ++ ++B; à\u05D0; [B5 B6]; [B5 B6] # àא ++B; a\u0300\u05D0; [B5 B6]; [B5 B6] # àא ++B; A\u0300\u05D0; [B5 B6]; [B5 B6] # àא ++B; À\u05D0; [B5 B6]; [B5 B6] # àא ++B; xn--0ca24w; [B5 B6]; [B5 B6] # àא ++B; 0à.\u05D0; [B1]; [B1] # 0à.א ++B; 0a\u0300.\u05D0; [B1]; [B1] # 0à.א ++B; 0A\u0300.\u05D0; [B1]; [B1] # 0à.א ++B; 0À.\u05D0; [B1]; [B1] # 0à.א ++B; xn--0-sfa.xn--4db; [B1]; [B1] # 0à.א ++B; à.\u05D0\u0308; ; xn--0ca.xn--ssa73l # à.א̈ ++B; a\u0300.\u05D0\u0308; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ ++B; A\u0300.\u05D0\u0308; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ ++B; À.\u05D0\u0308; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ ++B; xn--0ca.xn--ssa73l; à.\u05D0\u0308; xn--0ca.xn--ssa73l # à.א̈ ++B; à.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א ++B; a\u0300.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א ++B; A\u0300.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א ++B; À.\u05D00\u0660\u05D0; [B4]; [B4] # à.א0٠א ++B; xn--0ca.xn--0-zhcb98c; [B4]; [B4] # à.א0٠א ++B; \u0308.\u05D0; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ̈.א ++B; xn--ssa.xn--4db; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ̈.א ++B; à.\u05D00\u0660; [B4]; [B4] # à.א0٠ ++B; a\u0300.\u05D00\u0660; [B4]; [B4] # à.א0٠ ++B; A\u0300.\u05D00\u0660; [B4]; [B4] # à.א0٠ ++B; À.\u05D00\u0660; [B4]; [B4] # à.א0٠ ++B; xn--0ca.xn--0-zhc74b; [B4]; [B4] # à.א0٠ ++B; àˇ.\u05D0; [B6]; [B6] # àˇ.א ++B; a\u0300ˇ.\u05D0; [B6]; [B6] # àˇ.א ++B; A\u0300ˇ.\u05D0; [B6]; [B6] # àˇ.א ++B; Àˇ.\u05D0; [B6]; [B6] # àˇ.א ++B; xn--0ca88g.xn--4db; [B6]; [B6] # àˇ.א ++B; à\u0308.\u05D0; ; xn--0ca81i.xn--4db # à̈.א ++B; a\u0300\u0308.\u05D0; à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א ++B; A\u0300\u0308.\u05D0; à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א ++B; À\u0308.\u05D0; 
à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א ++B; xn--0ca81i.xn--4db; à\u0308.\u05D0; xn--0ca81i.xn--4db # à̈.א ++ ++# CONTEXT TESTS ++ ++T; a\u200Cb; [C1]; ab # ab ++N; a\u200Cb; [C1]; [C1] # ab ++T; A\u200CB; [C1]; ab # ab ++N; A\u200CB; [C1]; [C1] # ab ++T; A\u200Cb; [C1]; ab # ab ++N; A\u200Cb; [C1]; [C1] # ab ++B; ab; ; ++B; xn--ab-j1t; [C1]; [C1] # ab ++T; a\u094D\u200Cb; ; xn--ab-fsf # a्b ++N; a\u094D\u200Cb; ; xn--ab-fsf604u # a्b ++T; A\u094D\u200CB; a\u094D\u200Cb; xn--ab-fsf # a्b ++N; A\u094D\u200CB; a\u094D\u200Cb; xn--ab-fsf604u # a्b ++T; A\u094D\u200Cb; a\u094D\u200Cb; xn--ab-fsf # a्b ++N; A\u094D\u200Cb; a\u094D\u200Cb; xn--ab-fsf604u # a्b ++B; xn--ab-fsf; a\u094Db; xn--ab-fsf # a्b ++B; a\u094Db; ; xn--ab-fsf # a्b ++B; A\u094DB; a\u094Db; xn--ab-fsf # a्b ++B; A\u094Db; a\u094Db; xn--ab-fsf # a्b ++B; xn--ab-fsf604u; a\u094D\u200Cb; xn--ab-fsf604u # a्b ++T; \u0308\u200C\u0308\u0628b; [B1 C1 V5]; [B1 V5] # ̈̈بb ++N; \u0308\u200C\u0308\u0628b; [B1 C1 V5]; [B1 C1 V5] # ̈̈بb ++T; \u0308\u200C\u0308\u0628B; [B1 C1 V5]; [B1 V5] # ̈̈بb ++N; \u0308\u200C\u0308\u0628B; [B1 C1 V5]; [B1 C1 V5] # ̈̈بb ++B; xn--b-bcba413a; [B1 V5]; [B1 V5] # ̈̈بb ++B; xn--b-bcba413a2w8b; [B1 C1 V5]; [B1 C1 V5] # ̈̈بb ++T; a\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6] # aب̈̈ ++N; a\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6 C1] # aب̈̈ ++T; A\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6] # aب̈̈ ++N; A\u0628\u0308\u200C\u0308; [B5 B6 C1]; [B5 B6 C1] # aب̈̈ ++B; xn--a-ccba213a; [B5 B6]; [B5 B6] # aب̈̈ ++B; xn--a-ccba213a5w8b; [B5 B6 C1]; [B5 B6 C1] # aب̈̈ ++T; a\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb ++N; a\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb ++T; A\u0628\u0308\u200C\u0308\u0628B; [B5]; [B5] # aب̈̈بb ++N; A\u0628\u0308\u200C\u0308\u0628B; [B5]; [B5] # aب̈̈بb ++T; A\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb ++N; A\u0628\u0308\u200C\u0308\u0628b; [B5]; [B5] # aب̈̈بb ++B; xn--ab-uuba211bca; [B5]; [B5] # aب̈̈بb ++B; xn--ab-uuba211bca8057b; 
[B5]; [B5] # aب̈̈بb ++T; a\u200Db; [C2]; ab # ab ++N; a\u200Db; [C2]; [C2] # ab ++T; A\u200DB; [C2]; ab # ab ++N; A\u200DB; [C2]; [C2] # ab ++T; A\u200Db; [C2]; ab # ab ++N; A\u200Db; [C2]; [C2] # ab ++B; xn--ab-m1t; [C2]; [C2] # ab ++T; a\u094D\u200Db; ; xn--ab-fsf # a्b ++N; a\u094D\u200Db; ; xn--ab-fsf014u # a्b ++T; A\u094D\u200DB; a\u094D\u200Db; xn--ab-fsf # a्b ++N; A\u094D\u200DB; a\u094D\u200Db; xn--ab-fsf014u # a्b ++T; A\u094D\u200Db; a\u094D\u200Db; xn--ab-fsf # a्b ++N; A\u094D\u200Db; a\u094D\u200Db; xn--ab-fsf014u # a्b ++B; xn--ab-fsf014u; a\u094D\u200Db; xn--ab-fsf014u # a्b ++T; \u0308\u200D\u0308\u0628b; [B1 C2 V5]; [B1 V5] # ̈̈بb ++N; \u0308\u200D\u0308\u0628b; [B1 C2 V5]; [B1 C2 V5] # ̈̈بb ++T; \u0308\u200D\u0308\u0628B; [B1 C2 V5]; [B1 V5] # ̈̈بb ++N; \u0308\u200D\u0308\u0628B; [B1 C2 V5]; [B1 C2 V5] # ̈̈بb ++B; xn--b-bcba413a7w8b; [B1 C2 V5]; [B1 C2 V5] # ̈̈بb ++T; a\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6] # aب̈̈ ++N; a\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6 C2] # aب̈̈ ++T; A\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6] # aب̈̈ ++N; A\u0628\u0308\u200D\u0308; [B5 B6 C2]; [B5 B6 C2] # aب̈̈ ++B; xn--a-ccba213abx8b; [B5 B6 C2]; [B5 B6 C2] # aب̈̈ ++T; a\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5] # aب̈̈بb ++N; a\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5 C2] # aب̈̈بb ++T; A\u0628\u0308\u200D\u0308\u0628B; [B5 C2]; [B5] # aب̈̈بb ++N; A\u0628\u0308\u200D\u0308\u0628B; [B5 C2]; [B5 C2] # aب̈̈بb ++T; A\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5] # aب̈̈بb ++N; A\u0628\u0308\u200D\u0308\u0628b; [B5 C2]; [B5 C2] # aب̈̈بb ++B; xn--ab-uuba211bca5157b; [B5 C2]; [B5 C2] # aب̈̈بb ++ ++# SELECTED TESTS ++ ++B; ¡; ; xn--7a; NV8 ++B; xn--7a; ¡; xn--7a; NV8 ++B; ᧚; ; xn--pkf; XV8 ++B; xn--pkf; ᧚; xn--pkf; XV8 ++B; 。; [A4_2]; [A4_2] ++B; .; [A4_2]; [A4_2] ++B; ꭠ; ; xn--3y9a ++B; xn--3y9a; ꭠ; xn--3y9a ++B; 1234567890ä1234567890123456789012345678901234567890123456; ; [A4_2] ++B; 1234567890a\u03081234567890123456789012345678901234567890123456; 
1234567890ä1234567890123456789012345678901234567890123456; [A4_2] ++B; 1234567890A\u03081234567890123456789012345678901234567890123456; 1234567890ä1234567890123456789012345678901234567890123456; [A4_2] ++B; 1234567890Ä1234567890123456789012345678901234567890123456; 1234567890ä1234567890123456789012345678901234567890123456; [A4_2] ++B; xn--12345678901234567890123456789012345678901234567890123456-fxe; 1234567890ä1234567890123456789012345678901234567890123456; [A4_2] ++B; www.eXample.cOm; www.example.com; ++B; Bücher.de; bücher.de; xn--bcher-kva.de ++B; Bu\u0308cher.de; bücher.de; xn--bcher-kva.de ++B; bu\u0308cher.de; bücher.de; xn--bcher-kva.de ++B; bücher.de; ; xn--bcher-kva.de ++B; BÜCHER.DE; bücher.de; xn--bcher-kva.de ++B; BU\u0308CHER.DE; bücher.de; xn--bcher-kva.de ++B; xn--bcher-kva.de; bücher.de; xn--bcher-kva.de ++B; ÖBB; öbb; xn--bb-eka ++B; O\u0308BB; öbb; xn--bb-eka ++B; o\u0308bb; öbb; xn--bb-eka ++B; öbb; ; xn--bb-eka ++B; Öbb; öbb; xn--bb-eka ++B; O\u0308bb; öbb; xn--bb-eka ++B; xn--bb-eka; öbb; xn--bb-eka ++T; βόλος.com; ; xn--nxasmq6b.com ++N; βόλος.com; ; xn--nxasmm1c.com ++T; βο\u0301λος.com; βόλος.com; xn--nxasmq6b.com ++N; βο\u0301λος.com; βόλος.com; xn--nxasmm1c.com ++B; ΒΟ\u0301ΛΟΣ.COM; βόλοσ.com; xn--nxasmq6b.com ++B; ΒΌΛΟΣ.COM; βόλοσ.com; xn--nxasmq6b.com ++B; βόλοσ.com; ; xn--nxasmq6b.com ++B; βο\u0301λοσ.com; βόλοσ.com; xn--nxasmq6b.com ++B; Βο\u0301λοσ.com; βόλοσ.com; xn--nxasmq6b.com ++B; Βόλοσ.com; βόλοσ.com; xn--nxasmq6b.com ++B; xn--nxasmq6b.com; βόλοσ.com; xn--nxasmq6b.com ++T; Βο\u0301λος.com; βόλος.com; xn--nxasmq6b.com ++N; Βο\u0301λος.com; βόλος.com; xn--nxasmm1c.com ++T; Βόλος.com; βόλος.com; xn--nxasmq6b.com ++N; Βόλος.com; βόλος.com; xn--nxasmm1c.com ++B; xn--nxasmm1c.com; βόλος.com; xn--nxasmm1c.com ++B; xn--nxasmm1c; βόλος; xn--nxasmm1c ++T; βόλος; ; xn--nxasmq6b ++N; βόλος; ; xn--nxasmm1c ++T; βο\u0301λος; βόλος; xn--nxasmq6b ++N; βο\u0301λος; βόλος; xn--nxasmm1c ++B; ΒΟ\u0301ΛΟΣ; βόλοσ; xn--nxasmq6b ++B; ΒΌΛΟΣ; βόλοσ; 
xn--nxasmq6b ++B; βόλοσ; ; xn--nxasmq6b ++B; βο\u0301λοσ; βόλοσ; xn--nxasmq6b ++B; Βο\u0301λοσ; βόλοσ; xn--nxasmq6b ++B; Βόλοσ; βόλοσ; xn--nxasmq6b ++B; xn--nxasmq6b; βόλοσ; xn--nxasmq6b ++T; Βόλος; βόλος; xn--nxasmq6b ++N; Βόλος; βόλος; xn--nxasmm1c ++T; Βο\u0301λος; βόλος; xn--nxasmq6b ++N; Βο\u0301λος; βόλος; xn--nxasmm1c ++T; www.ශ\u0DCA\u200Dර\u0DD3.com; ; www.xn--10cl1a0b.com # www.ශ්රී.com ++N; www.ශ\u0DCA\u200Dර\u0DD3.com; ; www.xn--10cl1a0b660p.com # www.ශ්රී.com ++T; WWW.ශ\u0DCA\u200Dර\u0DD3.COM; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com ++N; WWW.ශ\u0DCA\u200Dර\u0DD3.COM; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b660p.com # www.ශ්රී.com ++T; Www.ශ\u0DCA\u200Dර\u0DD3.com; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com ++N; Www.ශ\u0DCA\u200Dර\u0DD3.com; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b660p.com # www.ශ්රී.com ++B; www.xn--10cl1a0b.com; www.ශ\u0DCAර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com ++B; www.ශ\u0DCAර\u0DD3.com; ; www.xn--10cl1a0b.com # www.ශ්රී.com ++B; WWW.ශ\u0DCAර\u0DD3.COM; www.ශ\u0DCAර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com ++B; Www.ශ\u0DCAර\u0DD3.com; www.ශ\u0DCAර\u0DD3.com; www.xn--10cl1a0b.com # www.ශ්රී.com ++B; www.xn--10cl1a0b660p.com; www.ශ\u0DCA\u200Dර\u0DD3.com; www.xn--10cl1a0b660p.com # www.ශ්රී.com ++T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC; ; xn--mgba3gch31f # نامهای ++N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC; ; xn--mgba3gch31f060k # نامهای ++B; xn--mgba3gch31f; \u0646\u0627\u0645\u0647\u0627\u06CC; xn--mgba3gch31f # نامهای ++B; \u0646\u0627\u0645\u0647\u0627\u06CC; ; xn--mgba3gch31f # نامهای ++B; xn--mgba3gch31f060k; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC; xn--mgba3gch31f060k # نامهای ++B; xn--mgba3gch31f060k.com; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f060k.com # نامهای.com ++T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; ; xn--mgba3gch31f.com # نامهای.com ++N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; ; 
xn--mgba3gch31f060k.com # نامهای.com ++T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.COM; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com ++N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.COM; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f060k.com # نامهای.com ++T; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.Com; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com ++N; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.Com; \u0646\u0627\u0645\u0647\u200C\u0627\u06CC.com; xn--mgba3gch31f060k.com # نامهای.com ++B; xn--mgba3gch31f.com; \u0646\u0627\u0645\u0647\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com ++B; \u0646\u0627\u0645\u0647\u0627\u06CC.com; ; xn--mgba3gch31f.com # نامهای.com ++B; \u0646\u0627\u0645\u0647\u0627\u06CC.COM; \u0646\u0627\u0645\u0647\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com ++B; \u0646\u0627\u0645\u0647\u0627\u06CC.Com; \u0646\u0627\u0645\u0647\u0627\u06CC.com; xn--mgba3gch31f.com # نامهای.com ++B; a.b.c。d。; a.b.c.d.; ++B; a.b.c。d。; a.b.c.d.; ++B; A.B.C。D。; a.b.c.d.; ++B; A.b.c。D。; a.b.c.d.; ++B; a.b.c.d.; ; ++B; A.B.C。D。; a.b.c.d.; ++B; A.b.c。D。; a.b.c.d.; ++B; U\u0308.xn--tda; ü.ü; xn--tda.xn--tda ++B; Ü.xn--tda; ü.ü; xn--tda.xn--tda ++B; ü.xn--tda; ü.ü; xn--tda.xn--tda ++B; u\u0308.xn--tda; ü.ü; xn--tda.xn--tda ++B; U\u0308.XN--TDA; ü.ü; xn--tda.xn--tda ++B; Ü.XN--TDA; ü.ü; xn--tda.xn--tda ++B; Ü.xn--Tda; ü.ü; xn--tda.xn--tda ++B; U\u0308.xn--Tda; ü.ü; xn--tda.xn--tda ++B; xn--tda.xn--tda; ü.ü; xn--tda.xn--tda ++B; ü.ü; ; xn--tda.xn--tda ++B; u\u0308.u\u0308; ü.ü; xn--tda.xn--tda ++B; U\u0308.U\u0308; ü.ü; xn--tda.xn--tda ++B; Ü.Ü; ü.ü; xn--tda.xn--tda ++B; Ü.ü; ü.ü; xn--tda.xn--tda ++B; U\u0308.u\u0308; ü.ü; xn--tda.xn--tda ++B; xn--u-ccb; [V1]; [V1] # ü ++B; a⒈com; [P1 V6]; [P1 V6] ++B; a1.com; ; ++B; A⒈COM; [P1 V6]; [P1 V6] ++B; A⒈Com; [P1 V6]; [P1 V6] ++B; xn--acom-0w1b; [V6]; [V6] ++B; xn--a-ecp.ru; [V6]; [V6] ++B; xn--0.pt; [A3]; [A3] ++B; xn--a.pt; [V6]; 
[V6] # .pt ++B; xn--a-Ä.pt; [A3]; [A3] ++B; xn--a-A\u0308.pt; [A3]; [A3] ++B; xn--a-a\u0308.pt; [A3]; [A3] ++B; xn--a-ä.pt; [A3]; [A3] ++B; XN--A-Ä.PT; [A3]; [A3] ++B; XN--A-A\u0308.PT; [A3]; [A3] ++B; Xn--A-A\u0308.pt; [A3]; [A3] ++B; Xn--A-Ä.pt; [A3]; [A3] ++B; xn--xn--a--gua.pt; [V2]; [V2] ++B; 日本語。JP; 日本語.jp; xn--wgv71a119e.jp ++B; 日本語。JP; 日本語.jp; xn--wgv71a119e.jp ++B; 日本語。jp; 日本語.jp; xn--wgv71a119e.jp ++B; 日本語。Jp; 日本語.jp; xn--wgv71a119e.jp ++B; xn--wgv71a119e.jp; 日本語.jp; xn--wgv71a119e.jp ++B; 日本語.jp; ; xn--wgv71a119e.jp ++B; 日本語.JP; 日本語.jp; xn--wgv71a119e.jp ++B; 日本語.Jp; 日本語.jp; xn--wgv71a119e.jp ++B; 日本語。jp; 日本語.jp; xn--wgv71a119e.jp ++B; 日本語。Jp; 日本語.jp; xn--wgv71a119e.jp ++B; ☕; ; xn--53h; NV8 ++B; xn--53h; ☕; xn--53h; NV8 ++T; 1.aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; [A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz ++N; 1.aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; [C1 C2 A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz ++T; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSS\u0302SSZ; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++N; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSS\u0302SSZ; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++T; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSŜSSZ; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++N; 1.ASS\u200C\u200DB\u200C\u200DCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSŜSSZ; [C1 C2]; [C1 C2 A4_2] # 
1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++T; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++N; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++T; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++N; 1.ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++T; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++N; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++T; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++N; 1.Ass\u200C\u200Db\u200C\u200Dcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [C1 C2]; [C1 C2 A4_2] # 
1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++B; 1.xn--assbcssssssssdssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssssz-pxq1419aa; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] ++B; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; ; [A4_2] ++B; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] ++B; 1.ASSBCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSS\u0302SSZ; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] ++B; 1.ASSBCSSSSSSSSDΣΣSSSSSSSSSSSSSSSSESSSSSSSSSSSSSSSSSSSSXSSSSSSSSSSSSSSSSSSSSYSSSSSSSSSSSSSSSŜSSZ; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] ++B; 1.Assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] ++B; 1.Assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssyssssssssssssssss\u0302ssz; 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz; [A4_2] ++B; 1.xn--assbcssssssssdssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssssz-pxq1419aa69989dba9gc; [C1 C2]; [C1 C2 A4_2] # 1.assbcssssssssdσσssssssssssssssssessssssssssssssssssssxssssssssssssssssssssysssssssssssssssŝssz ++T; 1.Aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; [A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz ++N; 1.Aß\u200C\u200Db\u200C\u200Dcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß\u0302ßz; [C1 C2]; 
[C1 C2 A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz ++B; 1.xn--abcdexyz-qyacaaabaaaaaaabaaaaaaaaabaaaaaaaaabaaaaaaaa010ze2isb1140zba8cc; [C1 C2]; [C1 C2 A4_2] # 1.aßbcßßßßdςσßßßßßßßßeßßßßßßßßßßxßßßßßßßßßßyßßßßßßßß̂ßz ++T; \u200Cx\u200Dn\u200C-\u200D-bß; [C1 C2]; xn--bss # xn--bß ++N; \u200Cx\u200Dn\u200C-\u200D-bß; [C1 C2]; [C1 C2] # xn--bß ++T; \u200CX\u200DN\u200C-\u200D-BSS; [C1 C2]; xn--bss # xn--bss ++N; \u200CX\u200DN\u200C-\u200D-BSS; [C1 C2]; [C1 C2] # xn--bss ++T; \u200Cx\u200Dn\u200C-\u200D-bss; [C1 C2]; xn--bss # xn--bss ++N; \u200Cx\u200Dn\u200C-\u200D-bss; [C1 C2]; [C1 C2] # xn--bss ++T; \u200CX\u200Dn\u200C-\u200D-Bss; [C1 C2]; xn--bss # xn--bss ++N; \u200CX\u200Dn\u200C-\u200D-Bss; [C1 C2]; [C1 C2] # xn--bss ++B; xn--bss; 夙; xn--bss ++B; 夙; ; xn--bss ++B; xn--xn--bss-7z6ccid; [C1 C2]; [C1 C2] # xn--bss ++T; \u200CX\u200Dn\u200C-\u200D-Bß; [C1 C2]; xn--bss # xn--bß ++N; \u200CX\u200Dn\u200C-\u200D-Bß; [C1 C2]; [C1 C2] # xn--bß ++B; xn--xn--b-pqa5796ccahd; [C1 C2]; [C1 C2] # xn--bß ++B; ˣ\u034Fℕ\u200B﹣\u00AD-\u180Cℬ\uFE00ſ\u2064𝔰󠇯ffl; 夡夞夜夙; xn--bssffl ++B; x\u034FN\u200B-\u00AD-\u180CB\uFE00s\u2064s󠇯ffl; 夡夞夜夙; xn--bssffl ++B; x\u034Fn\u200B-\u00AD-\u180Cb\uFE00s\u2064s󠇯ffl; 夡夞夜夙; xn--bssffl ++B; X\u034FN\u200B-\u00AD-\u180CB\uFE00S\u2064S󠇯FFL; 夡夞夜夙; xn--bssffl ++B; X\u034Fn\u200B-\u00AD-\u180CB\uFE00s\u2064s󠇯ffl; 夡夞夜夙; xn--bssffl ++B; xn--bssffl; 夡夞夜夙; xn--bssffl ++B; 夡夞夜夙; ; xn--bssffl ++B; ˣ\u034Fℕ\u200B﹣\u00AD-\u180Cℬ\uFE00S\u2064𝔰󠇯FFL; 夡夞夜夙; xn--bssffl ++B; x\u034FN\u200B-\u00AD-\u180CB\uFE00S\u2064s󠇯FFL; 夡夞夜夙; xn--bssffl ++B; ˣ\u034Fℕ\u200B﹣\u00AD-\u180Cℬ\uFE00s\u2064𝔰󠇯ffl; 夡夞夜夙; xn--bssffl ++B; 123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; ++B; 
123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; ; ++B; 123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; ; [A4_1] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901234.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; ; [A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901234.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; ; [A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901234.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; [A4_1 A4_2] ++B; ä1234567890123456789012345678901234567890123456789012345; ; xn--1234567890123456789012345678901234567890123456789012345-9te ++B; a\u03081234567890123456789012345678901234567890123456789012345; ä1234567890123456789012345678901234567890123456789012345; xn--1234567890123456789012345678901234567890123456789012345-9te ++B; A\u03081234567890123456789012345678901234567890123456789012345; ä1234567890123456789012345678901234567890123456789012345; xn--1234567890123456789012345678901234567890123456789012345-9te ++B; Ä1234567890123456789012345678901234567890123456789012345; ä1234567890123456789012345678901234567890123456789012345; 
xn--1234567890123456789012345678901234567890123456789012345-9te ++B; xn--1234567890123456789012345678901234567890123456789012345-9te; ä1234567890123456789012345678901234567890123456789012345; xn--1234567890123456789012345678901234567890123456789012345-9te ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 
123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 ++B; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 
123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901 ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; ; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. 
++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. 
++B; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901.; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901. ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; ; [A4_1] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u0308123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 
123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] ++B; 123456789012345678901234567890123456789012345678901234567890123.xn--1234567890123456789012345678901234567890123456789012345-kue.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä123456789012345678901234567890123456789012345.123456789012345678901234567890123456789012345678901234567890123.12345678901234567890123456789012345678901234567890123456789012; [A4_1] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; ; [A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 
123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.xn--12345678901234567890123456789012345678901234567890123456-fxe.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890; [A4_2] ++B; 
123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; ; [A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890Ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] ++B; 
123456789012345678901234567890123456789012345678901234567890123.xn--12345678901234567890123456789012345678901234567890123456-fxe.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.123456789012345678901234567890123456789012345678901234567890.; [A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; ; [A4_1 A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890a\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.1234567890A\u03081234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] ++B; 
123456789012345678901234567890123456789012345678901234567890123.1234567890Ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] ++B; 123456789012345678901234567890123456789012345678901234567890123.xn--12345678901234567890123456789012345678901234567890123456-fxe.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; 123456789012345678901234567890123456789012345678901234567890123.1234567890ä1234567890123456789012345678901234567890123456.123456789012345678901234567890123456789012345678901234567890123.1234567890123456789012345678901234567890123456789012345678901; [A4_1 A4_2] ++B; a.b..-q--a-.e; [V2 V3 A4_2]; [V2 V3 A4_2] ++B; a.b..-q--ä-.e; [V2 V3 A4_2]; [V2 V3 A4_2] ++B; a.b..-q--a\u0308-.e; [V2 V3 A4_2]; [V2 V3 A4_2] ++B; A.B..-Q--A\u0308-.E; [V2 V3 A4_2]; [V2 V3 A4_2] ++B; A.B..-Q--Ä-.E; [V2 V3 A4_2]; [V2 V3 A4_2] ++B; A.b..-Q--Ä-.E; [V2 V3 A4_2]; [V2 V3 A4_2] ++B; A.b..-Q--A\u0308-.E; [V2 V3 A4_2]; [V2 V3 A4_2] ++B; a.b..xn---q----jra.e; [V2 V3 A4_2]; [V2 V3 A4_2] ++B; a..c; [A4_2]; [A4_2] ++B; a.-b.; [V3]; [V3] ++B; a.b-.c; [V3]; [V3] ++B; a.-.c; [V3]; [V3] ++B; a.bc--de.f; [V2]; [V2] ++B; ä.\u00AD.c; [A4_2]; [A4_2] ++B; a\u0308.\u00AD.c; [A4_2]; [A4_2] ++B; A\u0308.\u00AD.C; [A4_2]; [A4_2] ++B; Ä.\u00AD.C; [A4_2]; [A4_2] ++B; xn--4ca..c; [A4_2]; [A4_2] ++B; ä.-b.; [V3]; [V3] ++B; a\u0308.-b.; [V3]; [V3] ++B; A\u0308.-B.; [V3]; [V3] ++B; Ä.-B.; [V3]; [V3] ++B; xn--4ca.-b.; [V3]; [V3] ++B; ä.b-.c; [V3]; [V3] ++B; a\u0308.b-.c; [V3]; [V3] ++B; A\u0308.B-.C; [V3]; [V3] ++B; Ä.B-.C; [V3]; [V3] ++B; Ä.b-.C; [V3]; [V3] ++B; 
A\u0308.b-.C; [V3]; [V3] ++B; xn--4ca.b-.c; [V3]; [V3] ++B; ä.-.c; [V3]; [V3] ++B; a\u0308.-.c; [V3]; [V3] ++B; A\u0308.-.C; [V3]; [V3] ++B; Ä.-.C; [V3]; [V3] ++B; xn--4ca.-.c; [V3]; [V3] ++B; ä.bc--de.f; [V2]; [V2] ++B; a\u0308.bc--de.f; [V2]; [V2] ++B; A\u0308.BC--DE.F; [V2]; [V2] ++B; Ä.BC--DE.F; [V2]; [V2] ++B; Ä.bc--De.f; [V2]; [V2] ++B; A\u0308.bc--De.f; [V2]; [V2] ++B; xn--4ca.bc--de.f; [V2]; [V2] ++B; a.b.\u0308c.d; [V5]; [V5] # a.b.̈c.d ++B; A.B.\u0308C.D; [V5]; [V5] # a.b.̈c.d ++B; A.b.\u0308c.d; [V5]; [V5] # a.b.̈c.d ++B; a.b.xn--c-bcb.d; [V5]; [V5] # a.b.̈c.d ++B; A0; a0; ++B; 0A; 0a; ++B; 0A.\u05D0; [B1]; [B1] # 0a.א ++B; 0a.\u05D0; [B1]; [B1] # 0a.א ++B; 0a.xn--4db; [B1]; [B1] # 0a.א ++B; c.xn--0-eha.xn--4db; [B1]; [B1] # c.0ü.א ++B; b-.\u05D0; [B6 V3]; [B6 V3] # b-.א ++B; B-.\u05D0; [B6 V3]; [B6 V3] # b-.א ++B; b-.xn--4db; [B6 V3]; [B6 V3] # b-.א ++B; d.xn----dha.xn--4db; [B6 V3]; [B6 V3] # d.ü-.א ++B; a\u05D0; [B5 B6]; [B5 B6] # aא ++B; A\u05D0; [B5 B6]; [B5 B6] # aא ++B; xn--a-0hc; [B5 B6]; [B5 B6] # aא ++B; \u05D0\u05C7; ; xn--vdbr # אׇ ++B; xn--vdbr; \u05D0\u05C7; xn--vdbr # אׇ ++B; \u05D09\u05C7; ; xn--9-ihcz # א9ׇ ++B; xn--9-ihcz; \u05D09\u05C7; xn--9-ihcz # א9ׇ ++B; \u05D0a\u05C7; [B2 B3]; [B2 B3] # אaׇ ++B; \u05D0A\u05C7; [B2 B3]; [B2 B3] # אaׇ ++B; xn--a-ihcz; [B2 B3]; [B2 B3] # אaׇ ++B; \u05D0\u05EA; ; xn--4db6c # את ++B; xn--4db6c; \u05D0\u05EA; xn--4db6c # את ++B; \u05D0\u05F3\u05EA; ; xn--4db6c0a # א׳ת ++B; xn--4db6c0a; \u05D0\u05F3\u05EA; xn--4db6c0a # א׳ת ++B; a\u05D0Tz; [B5]; [B5] # aאtz ++B; a\u05D0tz; [B5]; [B5] # aאtz ++B; A\u05D0TZ; [B5]; [B5] # aאtz ++B; A\u05D0tz; [B5]; [B5] # aאtz ++B; xn--atz-qpe; [B5]; [B5] # aאtz ++B; \u05D0T\u05EA; [B2]; [B2] # אtת ++B; \u05D0t\u05EA; [B2]; [B2] # אtת ++B; xn--t-zhc3f; [B2]; [B2] # אtת ++B; \u05D07\u05EA; ; xn--7-zhc3f # א7ת ++B; xn--7-zhc3f; \u05D07\u05EA; xn--7-zhc3f # א7ת ++B; \u05D0\u0667\u05EA; ; xn--4db6c6t # א٧ת ++B; xn--4db6c6t; \u05D0\u0667\u05EA; xn--4db6c6t # א٧ת ++B; a7\u0667z; 
[B5]; [B5] # a7٧z ++B; A7\u0667Z; [B5]; [B5] # a7٧z ++B; A7\u0667z; [B5]; [B5] # a7٧z ++B; xn--a7z-06e; [B5]; [B5] # a7٧z ++B; \u05D07\u0667\u05EA; [B4]; [B4] # א7٧ת ++B; xn--7-zhc3fty; [B4]; [B4] # א7٧ת ++T; ஹ\u0BCD\u200D; ; xn--dmc4b # ஹ் ++N; ஹ\u0BCD\u200D; ; xn--dmc4b194h # ஹ் ++B; xn--dmc4b; ஹ\u0BCD; xn--dmc4b # ஹ் ++B; ஹ\u0BCD; ; xn--dmc4b # ஹ் ++B; xn--dmc4b194h; ஹ\u0BCD\u200D; xn--dmc4b194h # ஹ் ++T; ஹ\u200D; [C2]; xn--dmc # ஹ ++N; ஹ\u200D; [C2]; [C2] # ஹ ++B; xn--dmc; ஹ; xn--dmc ++B; ஹ; ; xn--dmc ++B; xn--dmc225h; [C2]; [C2] # ஹ ++T; \u200D; [C2]; [A4_2] # ++N; \u200D; [C2]; [C2] # ++B; ; [A4_2]; [A4_2] ++B; xn--1ug; [C2]; [C2] # ++T; ஹ\u0BCD\u200C; ; xn--dmc4b # ஹ் ++N; ஹ\u0BCD\u200C; ; xn--dmc4by94h # ஹ் ++B; xn--dmc4by94h; ஹ\u0BCD\u200C; xn--dmc4by94h # ஹ் ++T; ஹ\u200C; [C1]; xn--dmc # ஹ ++N; ஹ\u200C; [C1]; [C1] # ஹ ++B; xn--dmc025h; [C1]; [C1] # ஹ ++T; \u200C; [C1]; [A4_2] # ++N; \u200C; [C1]; [C1] # ++B; xn--0ug; [C1]; [C1] # ++T; \u0644\u0670\u200C\u06ED\u06EF; ; xn--ghb2gxqia # لٰۭۯ ++N; \u0644\u0670\u200C\u06ED\u06EF; ; xn--ghb2gxqia7523a # لٰۭۯ ++B; xn--ghb2gxqia; \u0644\u0670\u06ED\u06EF; xn--ghb2gxqia # لٰۭۯ ++B; \u0644\u0670\u06ED\u06EF; ; xn--ghb2gxqia # لٰۭۯ ++B; xn--ghb2gxqia7523a; \u0644\u0670\u200C\u06ED\u06EF; xn--ghb2gxqia7523a # لٰۭۯ ++T; \u0644\u0670\u200C\u06EF; ; xn--ghb2g3q # لٰۯ ++N; \u0644\u0670\u200C\u06EF; ; xn--ghb2g3qq34f # لٰۯ ++B; xn--ghb2g3q; \u0644\u0670\u06EF; xn--ghb2g3q # لٰۯ ++B; \u0644\u0670\u06EF; ; xn--ghb2g3q # لٰۯ ++B; xn--ghb2g3qq34f; \u0644\u0670\u200C\u06EF; xn--ghb2g3qq34f # لٰۯ ++T; \u0644\u200C\u06ED\u06EF; ; xn--ghb25aga # لۭۯ ++N; \u0644\u200C\u06ED\u06EF; ; xn--ghb25aga828w # لۭۯ ++B; xn--ghb25aga; \u0644\u06ED\u06EF; xn--ghb25aga # لۭۯ ++B; \u0644\u06ED\u06EF; ; xn--ghb25aga # لۭۯ ++B; xn--ghb25aga828w; \u0644\u200C\u06ED\u06EF; xn--ghb25aga828w # لۭۯ ++T; \u0644\u200C\u06EF; ; xn--ghb65a # لۯ ++N; \u0644\u200C\u06EF; ; xn--ghb65a953d # لۯ ++B; xn--ghb65a; \u0644\u06EF; xn--ghb65a # لۯ ++B; \u0644\u06EF; 
; xn--ghb65a # لۯ ++B; xn--ghb65a953d; \u0644\u200C\u06EF; xn--ghb65a953d # لۯ ++T; \u0644\u0670\u200C\u06ED; [B3 C1]; xn--ghb2gxq # لٰۭ ++N; \u0644\u0670\u200C\u06ED; [B3 C1]; [B3 C1] # لٰۭ ++B; xn--ghb2gxq; \u0644\u0670\u06ED; xn--ghb2gxq # لٰۭ ++B; \u0644\u0670\u06ED; ; xn--ghb2gxq # لٰۭ ++B; xn--ghb2gxqy34f; [B3 C1]; [B3 C1] # لٰۭ ++T; \u06EF\u200C\u06EF; [C1]; xn--cmba # ۯۯ ++N; \u06EF\u200C\u06EF; [C1]; [C1] # ۯۯ ++B; xn--cmba; \u06EF\u06EF; xn--cmba # ۯۯ ++B; \u06EF\u06EF; ; xn--cmba # ۯۯ ++B; xn--cmba004q; [C1]; [C1] # ۯۯ ++T; \u0644\u200C; [B3 C1]; xn--ghb # ل ++N; \u0644\u200C; [B3 C1]; [B3 C1] # ل ++B; xn--ghb; \u0644; xn--ghb # ل ++B; \u0644; ; xn--ghb # ل ++B; xn--ghb413k; [B3 C1]; [B3 C1] # ل ++B; a。。b; [A4_2]; [A4_2] ++B; A。。B; [A4_2]; [A4_2] ++B; a..b; [A4_2]; [A4_2] ++T; \u200D。。\u06B9\u200C; [B1 B3 C1 C2 A4_2]; [A4_2] # ..ڹ ++N; \u200D。。\u06B9\u200C; [B1 B3 C1 C2 A4_2]; [B1 B3 C1 C2 A4_2] # ..ڹ ++B; ..xn--skb; [A4_2]; [A4_2] # ..ڹ ++B; xn--1ug..xn--skb080k; [B1 B3 C1 C2 A4_2]; [B1 B3 C1 C2 A4_2] # ..ڹ ++B; \u05D00\u0660; [B4]; [B4] # א0٠ ++B; xn--0-zhc74b; [B4]; [B4] # א0٠ ++B; $; [P1 V6]; [P1 V6] ++ ++# RANDOMIZED TESTS ++ ++B; c.0ü.\u05D0; [B1]; [B1] # c.0ü.א ++B; c.0u\u0308.\u05D0; [B1]; [B1] # c.0ü.א ++B; C.0U\u0308.\u05D0; [B1]; [B1] # c.0ü.א ++B; C.0Ü.\u05D0; [B1]; [B1] # c.0ü.א ++B; ⒕∝\u065F򓤦.-󠄯; [P1 V3 V6]; [P1 V3 V6] # ⒕∝ٟ.- ++B; 14.∝\u065F򓤦.-󠄯; [P1 V3 V6]; [P1 V3 V6] # 14.∝ٟ.- ++B; 14.xn--7hb713l3v90n.-; [V3 V6]; [V3 V6] # 14.∝ٟ.- ++B; xn--7hb713lfwbi1311b.-; [V3 V6]; [V3 V6] # ⒕∝ٟ.- ++B; ꡣ.\u07CF; ; xn--8c9a.xn--qsb # ꡣ.ߏ ++B; xn--8c9a.xn--qsb; ꡣ.\u07CF; xn--8c9a.xn--qsb # ꡣ.ߏ ++B; ≯\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- ++B; >\u0338\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- ++B; ≯\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- ++B; >\u0338\u0603。-; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≯.- ++B; xn--lfb566l.-; [B1 V3 V6]; [B1 V3 V6] # ≯.- ++T; ⾛𐹧⾕.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςႭ ++N; ⾛𐹧⾕.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 
走𐹧谷.ςႭ ++T; 走𐹧谷.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςႭ ++N; 走𐹧谷.\u115F󠗰ςႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςႭ ++T; 走𐹧谷.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ ++N; 走𐹧谷.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ ++B; 走𐹧谷.\u115F󠗰ΣႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σႭ ++B; 走𐹧谷.\u115F󠗰σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ ++B; 走𐹧谷.\u115F󠗰Σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ ++B; xn--6g3a1x434z.xn--4xa180eotvh7453a; [B5 V6]; [B5 V6] # 走𐹧谷.σⴍ ++B; xn--6g3a1x434z.xn--4xa627dhpae6345i; [B5 V6]; [B5 V6] # 走𐹧谷.σႭ ++B; xn--6g3a1x434z.xn--3xa380eotvh7453a; [B5 V6]; [B5 V6] # 走𐹧谷.ςⴍ ++B; xn--6g3a1x434z.xn--3xa827dhpae6345i; [B5 V6]; [B5 V6] # 走𐹧谷.ςႭ ++T; ⾛𐹧⾕.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ ++N; ⾛𐹧⾕.\u115F󠗰ςⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.ςⴍ ++B; ⾛𐹧⾕.\u115F󠗰ΣႭ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σႭ ++B; ⾛𐹧⾕.\u115F󠗰σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ ++B; ⾛𐹧⾕.\u115F󠗰Σⴍ; [B5 P1 V6]; [B5 P1 V6] # 走𐹧谷.σⴍ ++T; \u200D≠ᢙ≯.솣-ᡴႠ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴႠ ++N; \u200D≠ᢙ≯.솣-ᡴႠ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴႠ ++T; \u200D=\u0338ᢙ>\u0338.솣-ᡴႠ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴႠ ++N; \u200D=\u0338ᢙ>\u0338.솣-ᡴႠ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴႠ ++T; \u200D=\u0338ᢙ>\u0338.솣-ᡴⴀ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴⴀ ++N; \u200D=\u0338ᢙ>\u0338.솣-ᡴⴀ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴⴀ ++T; \u200D≠ᢙ≯.솣-ᡴⴀ; [C2 P1 V6]; [P1 V6] # ≠ᢙ≯.솣-ᡴⴀ ++N; \u200D≠ᢙ≯.솣-ᡴⴀ; [C2 P1 V6]; [C2 P1 V6] # ≠ᢙ≯.솣-ᡴⴀ ++B; xn--jbf911clb.xn----p9j493ivi4l; [V6]; [V6] ++B; xn--jbf929a90b0b.xn----p9j493ivi4l; [C2 V6]; [C2 V6] # ≠ᢙ≯.솣-ᡴⴀ ++B; xn--jbf911clb.xn----6zg521d196p; [V6]; [V6] ++B; xn--jbf929a90b0b.xn----6zg521d196p; [C2 V6]; [C2 V6] # ≠ᢙ≯.솣-ᡴႠ ++B; 񯞜.𐿇\u0FA2\u077D\u0600; [P1 V6]; [P1 V6] # .ྡྷݽ ++B; 񯞜.𐿇\u0FA1\u0FB7\u077D\u0600; [P1 V6]; [P1 V6] # .ྡྷݽ ++B; 񯞜.𐿇\u0FA1\u0FB7\u077D\u0600; [P1 V6]; [P1 V6] # .ྡྷݽ ++B; xn--gw68a.xn--ifb57ev2psc6027m; [V6]; [V6] # .ྡྷݽ ++B; 𣳔\u0303.𑓂; [V5]; [V5] # 𣳔̃.𑓂 ++B; xn--nsa95820a.xn--wz1d; [V5]; [V5] # 𣳔̃.𑓂 ++B; 𞤀𞥅񘐱。󠄌Ⴣꡥ; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 𞤢𞥅񘐱。󠄌ⴣꡥ; [B2 B3 P1 V6]; [B2 B3 
P1 V6] ++B; xn--9d6hgcy3556a.xn--rlju750b; [B2 B3 V6]; [B2 B3 V6] ++B; xn--9d6hgcy3556a.xn--7nd0578e; [B2 B3 V6]; [B2 B3 V6] ++B; 𞤀𞥅񘐱。󠄌ⴣꡥ; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++T; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 ++N; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 ++T; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 ++N; \u08E2𑁿ς𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿ς𖬱.렧 ++B; \u08E2𑁿Σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 ++B; \u08E2𑁿Σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 ++B; \u08E2𑁿σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 ++B; \u08E2𑁿σ𖬱。󠅡렧; [B1 P1 V6]; [B1 P1 V6] # 𑁿σ𖬱.렧 ++B; xn--4xa53xp48ys2xc.xn--kn2b; [B1 V6]; [B1 V6] # 𑁿σ𖬱.렧 ++B; xn--3xa73xp48ys2xc.xn--kn2b; [B1 V6]; [B1 V6] # 𑁿ς𖬱.렧 ++T; -\u200D。𞤍\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𞤯⒈ ++N; -\u200D。𞤍\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𞤯⒈ ++T; -\u200D。𞤍\u200C\u200D1.; [B1 C1 C2 V3]; [B1 V3] # -.𞤯1. ++N; -\u200D。𞤍\u200C\u200D1.; [B1 C1 C2 V3]; [B1 C1 C2 V3] # -.𞤯1. ++T; -\u200D。𞤯\u200C\u200D1.; [B1 C1 C2 V3]; [B1 V3] # -.𞤯1. ++N; -\u200D。𞤯\u200C\u200D1.; [B1 C1 C2 V3]; [B1 C1 C2 V3] # -.𞤯1. ++B; -.xn--1-0i8r.; [B1 V3]; [B1 V3] ++B; xn----ugn.xn--1-rgnd61297b.; [B1 C1 C2 V3]; [B1 C1 C2 V3] # -.𞤯1. ++T; -\u200D。𞤯\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𞤯⒈ ++N; -\u200D。𞤯\u200C\u200D⒈; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𞤯⒈ ++B; -.xn--tsh3666n; [B1 V3 V6]; [B1 V3 V6] ++B; xn----ugn.xn--0ugc555aiv51d; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # -.𞤯⒈ ++T; \u200C򅎭.Ⴒ𑇀; [C1 P1 V6]; [P1 V6] # .Ⴒ𑇀 ++N; \u200C򅎭.Ⴒ𑇀; [C1 P1 V6]; [C1 P1 V6] # .Ⴒ𑇀 ++T; \u200C򅎭.ⴒ𑇀; [C1 P1 V6]; [P1 V6] # .ⴒ𑇀 ++N; \u200C򅎭.ⴒ𑇀; [C1 P1 V6]; [C1 P1 V6] # .ⴒ𑇀 ++B; xn--bn95b.xn--9kj2034e; [V6]; [V6] ++B; xn--0ug15083f.xn--9kj2034e; [C1 V6]; [C1 V6] # .ⴒ𑇀 ++B; xn--bn95b.xn--qnd6272k; [V6]; [V6] ++B; xn--0ug15083f.xn--qnd6272k; [C1 V6]; [C1 V6] # .Ⴒ𑇀 ++T; 繱𑖿\u200D.8︒; [P1 V6]; [P1 V6] # 繱𑖿.8︒ ++N; 繱𑖿\u200D.8︒; [P1 V6]; [P1 V6] # 繱𑖿.8︒ ++T; 繱𑖿\u200D.8。; 繱𑖿\u200D.8.; xn--gl0as212a.8. # 繱𑖿.8. 
++N; 繱𑖿\u200D.8。; 繱𑖿\u200D.8.; xn--1ug6928ac48e.8. # 繱𑖿.8. ++B; xn--gl0as212a.8.; 繱𑖿.8.; xn--gl0as212a.8. ++B; 繱𑖿.8.; ; xn--gl0as212a.8. ++B; xn--1ug6928ac48e.8.; 繱𑖿\u200D.8.; xn--1ug6928ac48e.8. # 繱𑖿.8. ++T; 繱𑖿\u200D.8.; ; xn--gl0as212a.8. # 繱𑖿.8. ++N; 繱𑖿\u200D.8.; ; xn--1ug6928ac48e.8. # 繱𑖿.8. ++B; xn--gl0as212a.xn--8-o89h; [V6]; [V6] ++B; xn--1ug6928ac48e.xn--8-o89h; [V6]; [V6] # 繱𑖿.8︒ ++B; 󠆾.𞀈; [V5 A4_2]; [V5 A4_2] ++B; 󠆾.𞀈; [V5 A4_2]; [V5 A4_2] ++B; .xn--ph4h; [V5 A4_2]; [V5 A4_2] ++T; ß\u06EB。\u200D; [C2]; xn--ss-59d. # ß۫. ++N; ß\u06EB。\u200D; [C2]; [C2] # ß۫. ++T; SS\u06EB。\u200D; [C2]; xn--ss-59d. # ss۫. ++N; SS\u06EB。\u200D; [C2]; [C2] # ss۫. ++T; ss\u06EB。\u200D; [C2]; xn--ss-59d. # ss۫. ++N; ss\u06EB。\u200D; [C2]; [C2] # ss۫. ++T; Ss\u06EB。\u200D; [C2]; xn--ss-59d. # ss۫. ++N; Ss\u06EB。\u200D; [C2]; [C2] # ss۫. ++B; xn--ss-59d.; ss\u06EB.; xn--ss-59d. # ss۫. ++B; ss\u06EB.; ; xn--ss-59d. # ss۫. ++B; SS\u06EB.; ss\u06EB.; xn--ss-59d. # ss۫. ++B; Ss\u06EB.; ss\u06EB.; xn--ss-59d. # ss۫. ++B; xn--ss-59d.xn--1ug; [C2]; [C2] # ss۫. ++B; xn--zca012a.xn--1ug; [C2]; [C2] # ß۫. ++T; 󠐵\u200C⒈.󠎇; [C1 P1 V6]; [P1 V6] # ⒈. ++N; 󠐵\u200C⒈.󠎇; [C1 P1 V6]; [C1 P1 V6] # ⒈. ++T; 󠐵\u200C1..󠎇; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # 1.. ++N; 󠐵\u200C1..󠎇; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # 1.. ++B; xn--1-bs31m..xn--tv36e; [V6 A4_2]; [V6 A4_2] ++B; xn--1-rgn37671n..xn--tv36e; [C1 V6 A4_2]; [C1 V6 A4_2] # 1.. ++B; xn--tshz2001k.xn--tv36e; [V6]; [V6] ++B; xn--0ug88o47900b.xn--tv36e; [C1 V6]; [C1 V6] # ⒈. ++T; 󟈣\u065F\uAAB2ß。󌓧; [P1 V6]; [P1 V6] # ٟꪲß. ++N; 󟈣\u065F\uAAB2ß。󌓧; [P1 V6]; [P1 V6] # ٟꪲß. ++B; 󟈣\u065F\uAAB2SS。󌓧; [P1 V6]; [P1 V6] # ٟꪲss. ++B; 󟈣\u065F\uAAB2ss。󌓧; [P1 V6]; [P1 V6] # ٟꪲss. ++B; 󟈣\u065F\uAAB2Ss。󌓧; [P1 V6]; [P1 V6] # ٟꪲss. ++B; xn--ss-3xd2839nncy1m.xn--bb79d; [V6]; [V6] # ٟꪲss. ++B; xn--zca92z0t7n5w96j.xn--bb79d; [V6]; [V6] # ٟꪲß. 
++T; \u0774\u200C𞤿。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [P1 V6] # ݴ𞤿.䉜 ++N; \u0774\u200C𞤿。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ݴ𞤿.䉜 ++T; \u0774\u200C𞤝。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [P1 V6] # ݴ𞤿.䉜 ++N; \u0774\u200C𞤝。𽘐䉜\u200D񿤼; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ݴ𞤿.䉜 ++B; xn--4pb2977v.xn--z0nt555ukbnv; [V6]; [V6] # ݴ𞤿.䉜 ++B; xn--4pb607jjt73a.xn--1ug236ke314donv1a; [C1 C2 V6]; [C1 C2 V6] # ݴ𞤿.䉜 ++T; 򔭜ςᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 ++N; 򔭜ςᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 ++T; 򔭜ςᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 ++N; 򔭜ςᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # ςᡱ⒈.≮𑄳𐮍 ++T; 򔭜ςᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 ++N; 򔭜ςᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 ++T; 򔭜ςᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 ++N; 򔭜ςᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 ++T; 򔭜Σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 ++N; 򔭜Σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 ++T; 򔭜Σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 ++N; 򔭜Σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 ++T; 򔭜σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 ++N; 򔭜σᡱ1..≮𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 ++T; 򔭜σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 ++N; 򔭜σᡱ1..<\u0338𑄳\u200D𐮍; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # σᡱ1..≮𑄳𐮍 ++B; xn--1-zmb699meq63t..xn--gdh5392g6sd; [B1 V6 A4_2]; [B1 V6 A4_2] ++B; xn--1-zmb699meq63t..xn--1ug85gn777ahze; [B1 V6 A4_2]; [B1 V6 A4_2] # σᡱ1..≮𑄳𐮍 ++B; xn--1-xmb999meq63t..xn--1ug85gn777ahze; [B1 V6 A4_2]; [B1 V6 A4_2] # ςᡱ1..≮𑄳𐮍 ++T; 򔭜Σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 ++N; 򔭜Σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 ++T; 򔭜Σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 ++N; 򔭜Σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 ++T; 򔭜σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 ++N; 򔭜σᡱ⒈.≮𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # 
σᡱ⒈.≮𑄳𐮍 ++T; 򔭜σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 ++N; 򔭜σᡱ⒈.<\u0338𑄳\u200D𐮍; [B1 P1 V6]; [B1 P1 V6] # σᡱ⒈.≮𑄳𐮍 ++B; xn--4xa207hkzinr77u.xn--gdh5392g6sd; [B1 V6]; [B1 V6] ++B; xn--4xa207hkzinr77u.xn--1ug85gn777ahze; [B1 V6]; [B1 V6] # σᡱ⒈.≮𑄳𐮍 ++B; xn--3xa407hkzinr77u.xn--1ug85gn777ahze; [B1 V6]; [B1 V6] # ςᡱ⒈.≮𑄳𐮍 ++B; \u3164\u094DႠ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्Ⴀ័. ++B; \u1160\u094DႠ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्Ⴀ័. ++B; \u1160\u094Dⴀ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्ⴀ័. ++B; xn--n3b742bkqf4ty.; [V6]; [V6] # ्ⴀ័. ++B; xn--n3b468aoqa89r.; [V6]; [V6] # ्Ⴀ័. ++B; \u3164\u094Dⴀ\u17D0.\u180B; [P1 V6]; [P1 V6] # ्ⴀ័. ++B; xn--n3b445e53po6d.; [V6]; [V6] # ्ⴀ័. ++B; xn--n3b468azngju2a.; [V6]; [V6] # ्Ⴀ័. ++T; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [V5] # ❣.্𑰽ؒꤩ ++N; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [C2 V5] # ❣.্𑰽ؒꤩ ++T; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [V5] # ❣.্𑰽ؒꤩ ++N; ❣\u200D.\u09CD𑰽\u0612\uA929; [C2 V5]; [C2 V5] # ❣.্𑰽ؒꤩ ++B; xn--pei.xn--0fb32q3w7q2g4d; [V5]; [V5] # ❣.্𑰽ؒꤩ ++B; xn--1ugy10a.xn--0fb32q3w7q2g4d; [C2 V5]; [C2 V5] # ❣.্𑰽ؒꤩ ++B; ≮𐳺𐹄.≯񪮸ꡅ; [B1 P1 V6]; [B1 P1 V6] ++B; <\u0338𐳺𐹄.>\u0338񪮸ꡅ; [B1 P1 V6]; [B1 P1 V6] ++B; xn--gdh7943gk2a.xn--hdh1383c5e36c; [B1 V6]; [B1 V6] ++B; \u0CCC𐧅𐳏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ ++B; \u0CCC𐧅𐳏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ ++B; \u0CCC𐧅𐲏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ ++B; xn--7tc6360ky5bn2732c.xn--8tc429c; [B1 V5 V6]; [B1 V5 V6] # ೌ𐧅𐳏.್ᠦ ++B; \u0CCC𐧅𐲏󠲺。\u0CCDᠦ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ೌ𐧅𐳏.್ᠦ ++B; \u0349。𧡫; [V5]; [V5] # ͉.𧡫 ++B; xn--nua.xn--bc6k; [V5]; [V5] # ͉.𧡫 ++B; 𑰿󠅦.\u1160; [P1 V5 V6]; [P1 V5 V6] # 𑰿. ++B; 𑰿󠅦.\u1160; [P1 V5 V6]; [P1 V5 V6] # 𑰿. ++B; xn--ok3d.xn--psd; [V5 V6]; [V5 V6] # 𑰿. ++T; -𞤆\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -𞤨. ++N; -𞤆\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 C2 P1 V3 V6] # -𞤨. ++T; -𞤨\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -𞤨. 
++N; -𞤨\u200D。󸼄𞳒; [B1 B5 B6 C2 P1 V3 V6]; [B1 B5 B6 C2 P1 V3 V6] # -𞤨. ++B; xn----ni8r.xn--846h96596c; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] ++B; xn----ugnx367r.xn--846h96596c; [B1 B5 B6 C2 V3 V6]; [B1 B5 B6 C2 V3 V6] # -𞤨. ++B; ꡏ󠇶≯𳾽。\u1DFD⾇滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 ++B; ꡏ󠇶>\u0338𳾽。\u1DFD⾇滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 ++B; ꡏ󠇶≯𳾽。\u1DFD舛滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 ++B; ꡏ󠇶>\u0338𳾽。\u1DFD舛滸𐹰; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꡏ≯.᷽舛滸𐹰 ++B; xn--hdh7483cu6twwki8e.xn--yfg0765a58l0n6k; [B1 V5 V6]; [B1 V5 V6] # ꡏ≯.᷽舛滸𐹰 ++B; 蔏。𑰺; [V5]; [V5] ++B; 蔏。𑰺; [V5]; [V5] ++B; xn--uy1a.xn--jk3d; [V5]; [V5] ++B; 𝟿𐮋。󠄊; [B1]; [B1] ++B; 9𐮋。󠄊; [B1]; [B1] ++B; xn--9-rv5i.; [B1]; [B1] ++B; 󟇇-䟖F。\u07CB⒈\u0662; [B4 P1 V6]; [B4 P1 V6] # -䟖f.ߋ⒈٢ ++B; 󟇇-䟖F。\u07CB1.\u0662; [B1 P1 V6]; [B1 P1 V6] # -䟖f.ߋ1.٢ ++B; 󟇇-䟖f。\u07CB1.\u0662; [B1 P1 V6]; [B1 P1 V6] # -䟖f.ߋ1.٢ ++B; xn---f-mz8b08788k.xn--1-ybd.xn--bib; [B1 V6]; [B1 V6] # -䟖f.ߋ1.٢ ++B; 󟇇-䟖f。\u07CB⒈\u0662; [B4 P1 V6]; [B4 P1 V6] # -䟖f.ߋ⒈٢ ++B; xn---f-mz8b08788k.xn--bib53ev44d; [B4 V6]; [B4 V6] # -䟖f.ߋ⒈٢ ++T; \u200C。𐹺; [B1 C1]; [B1 A4_2] # .𐹺 ++N; \u200C。𐹺; [B1 C1]; [B1 C1] # .𐹺 ++T; \u200C。𐹺; [B1 C1]; [B1 A4_2] # .𐹺 ++N; \u200C。𐹺; [B1 C1]; [B1 C1] # .𐹺 ++B; .xn--yo0d; [B1 A4_2]; [B1 A4_2] ++B; xn--0ug.xn--yo0d; [B1 C1]; [B1 C1] # .𐹺 ++T; 𐡆.≯\u200C-𞥀; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 ++N; 𐡆.≯\u200C-𞥀; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 ++T; 𐡆.>\u0338\u200C-𞥀; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 ++N; 𐡆.>\u0338\u200C-𞥀; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 ++T; 𐡆.>\u0338\u200C-𞤞; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 ++N; 𐡆.>\u0338\u200C-𞤞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 ++T; 𐡆.≯\u200C-𞤞; [B1 C1 P1 V6]; [B1 P1 V6] # 𐡆.≯-𞥀 ++N; 𐡆.≯\u200C-𞤞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐡆.≯-𞥀 ++B; xn--le9c.xn----ogo9956r; [B1 V6]; [B1 V6] ++B; xn--le9c.xn----rgn40iy359e; [B1 C1 V6]; [B1 C1 V6] # 𐡆.≯-𞥀 ++B; 󠁀-。≠\uFCD7; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج ++B; 󠁀-。=\u0338\uFCD7; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج ++B; 
󠁀-。≠\u0647\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج ++B; 󠁀-。=\u0338\u0647\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.≠هج ++B; xn----f411m.xn--rgb7c611j; [B1 V3 V6]; [B1 V3 V6] # -.≠هج ++T; 񻬹𑈵。\u200D𞨶; [B1 C2 P1 V6]; [P1 V6] # 𑈵. ++N; 񻬹𑈵。\u200D𞨶; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𑈵. ++B; xn--8g1d12120a.xn--5l6h; [V6]; [V6] ++B; xn--8g1d12120a.xn--1ug6651p; [B1 C2 V6]; [B1 C2 V6] # 𑈵. ++B; 𑋧\uA9C02。㧉򒖄; [P1 V5 V6]; [P1 V5 V6] # 𑋧꧀2.㧉 ++B; 𑋧\uA9C02。㧉򒖄; [P1 V5 V6]; [P1 V5 V6] # 𑋧꧀2.㧉 ++B; xn--2-5z4eu89y.xn--97l02706d; [V5 V6]; [V5 V6] # 𑋧꧀2.㧉 ++T; \u200C𽬄𐹴𞩥。≯6; [B1 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹴.≯6 ++N; \u200C𽬄𐹴𞩥。≯6; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹴.≯6 ++T; \u200C𽬄𐹴𞩥。>\u03386; [B1 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹴.≯6 ++N; \u200C𽬄𐹴𞩥。>\u03386; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹴.≯6 ++B; xn--so0du768aim9m.xn--6-ogo; [B1 B5 B6 V6]; [B1 B5 B6 V6] ++B; xn--0ug7105gf5wfxepq.xn--6-ogo; [B1 C1 V6]; [B1 C1 V6] # 𐹴.≯6 ++T; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 P1 V3 V5 V6] # 𑁿.𐹦- ++N; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𑁿.𐹦- ++T; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 P1 V3 V5 V6] # 𑁿.𐹦- ++N; 𑁿.𐹦𻞵-\u200D; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𑁿.𐹦- ++B; xn--q30d.xn----i26i1299n; [B1 B3 B6 V3 V5 V6]; [B1 B3 B6 V3 V5 V6] ++B; xn--q30d.xn----ugn1088hfsxv; [B1 B3 B6 C2 V5 V6]; [B1 B3 B6 C2 V5 V6] # 𑁿.𐹦- ++T; ⤸ς𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸ς. ++N; ⤸ς𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸ς. ++T; ⤸ς𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸ς. ++N; ⤸ς𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸ς. ++B; ⤸Σ𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸σ. ++B; ⤸σ𺱀。\u1160; [P1 V6]; [P1 V6] # ⤸σ. ++B; xn--4xa192qmp03d.xn--psd; [V6]; [V6] # ⤸σ. ++B; xn--3xa392qmp03d.xn--psd; [V6]; [V6] # ⤸ς. ++B; ⤸Σ𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸σ. ++B; ⤸σ𺱀。\uFFA0; [P1 V6]; [P1 V6] # ⤸σ. ++B; xn--4xa192qmp03d.xn--cl7c; [V6]; [V6] # ⤸σ. ++B; xn--3xa392qmp03d.xn--cl7c; [V6]; [V6] # ⤸ς. 
++B; \u0765\u1035𐫔\u06D5.𐦬𑋪Ⴃ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ݥဵ𐫔ە.𐦬𑋪Ⴃ ++B; \u0765\u1035𐫔\u06D5.𐦬𑋪ⴃ; [B2 B3]; [B2 B3] # ݥဵ𐫔ە.𐦬𑋪ⴃ ++B; xn--llb10as9tqp5y.xn--ukj7371e21f; [B2 B3]; [B2 B3] # ݥဵ𐫔ە.𐦬𑋪ⴃ ++B; xn--llb10as9tqp5y.xn--bnd9168j21f; [B2 B3 V6]; [B2 B3 V6] # ݥဵ𐫔ە.𐦬𑋪Ⴃ ++B; \u0661\u1B44-킼.\u1BAA\u0616\u066C≯; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ١᭄-킼.᮪ؖ٬≯ ++B; \u0661\u1B44-킼.\u1BAA\u0616\u066C>\u0338; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ١᭄-킼.᮪ؖ٬≯ ++B; xn----9pc551nk39n.xn--4fb6o571degg; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ١᭄-킼.᮪ؖ٬≯ ++B; -。\u06C2\u0604򅖡𑓂; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -.ۂ𑓂 ++B; -。\u06C1\u0654\u0604򅖡𑓂; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -.ۂ𑓂 ++B; -.xn--mfb39a7208dzgs3d; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # -.ۂ𑓂 ++T; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [P1 V5 V6] # .ֽꡝ𐋡 ++N; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .ֽꡝ𐋡 ++T; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [P1 V5 V6] # .ֽꡝ𐋡 ++N; \u200D󯑖󠁐.\u05BD𙮰ꡝ𐋡; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .ֽꡝ𐋡 ++B; xn--b726ey18m.xn--ldb8734fg0qcyzzg; [V5 V6]; [V5 V6] # .ֽꡝ𐋡 ++B; xn--1ug66101lt8me.xn--ldb8734fg0qcyzzg; [C2 V5 V6]; [C2 V5 V6] # .ֽꡝ𐋡 ++T; ︒􃈵ς񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] ++N; ︒􃈵ς񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] ++T; 。􃈵ς񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] ++N; 。􃈵ς񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; 。􃈵Σ񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; 。􃈵σ񀠇。𐮈; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; .xn--4xa68573c7n64d.xn--f29c; [V6 A4_2]; [V6 A4_2] ++B; .xn--3xa88573c7n64d.xn--f29c; [V6 A4_2]; [V6 A4_2] ++B; ︒􃈵Σ񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] ++B; ︒􃈵σ񀠇。𐮈; [B1 P1 V6]; [B1 P1 V6] ++B; xn--4xa1729jwz5t7gl5f.xn--f29c; [B1 V6]; [B1 V6] ++B; xn--3xa3729jwz5t7gl5f.xn--f29c; [B1 V6]; [B1 V6] ++B; \u07D9.\u06EE󆾃≯󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ ++B; \u07D9.\u06EE󆾃>\u0338󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ ++B; \u07D9.\u06EE󆾃≯󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ ++B; \u07D9.\u06EE󆾃>\u0338󠅲; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ߙ.ۮ≯ ++B; xn--0sb.xn--bmb691l0524t; [B2 B3 V6]; [B2 B3 V6] # ߙ.ۮ≯ ++B; \u1A73󚙸.𐭍; 
[B1 P1 V5 V6]; [B1 P1 V5 V6] # ᩳ.𐭍 ++B; xn--2of22352n.xn--q09c; [B1 V5 V6]; [B1 V5 V6] # ᩳ.𐭍 ++B; ⒉󠊓≠。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] ++B; ⒉󠊓=\u0338。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] ++B; 2.󠊓≠。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] ++B; 2.󠊓=\u0338。Ⴟ⬣Ⴈ; [P1 V6]; [P1 V6] ++B; 2.󠊓=\u0338。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] ++B; 2.󠊓≠。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] ++B; 2.xn--1chz4101l.xn--45iz7d6b; [V6]; [V6] ++B; 2.xn--1chz4101l.xn--gnd9b297j; [V6]; [V6] ++B; ⒉󠊓=\u0338。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] ++B; ⒉󠊓≠。ⴟ⬣ⴈ; [P1 V6]; [P1 V6] ++B; xn--1ch07f91401d.xn--45iz7d6b; [V6]; [V6] ++B; xn--1ch07f91401d.xn--gnd9b297j; [V6]; [V6] ++B; -󠉱\u0FB8Ⴥ。-𐹽\u0774𞣑; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ྸჅ.-𐹽ݴ𞣑 ++B; -󠉱\u0FB8ⴥ。-𐹽\u0774𞣑; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ྸⴥ.-𐹽ݴ𞣑 ++B; xn----xmg317tgv352a.xn----05c4213ryr0g; [B1 V3 V6]; [B1 V3 V6] # -ྸⴥ.-𐹽ݴ𞣑 ++B; xn----xmg12fm2555h.xn----05c4213ryr0g; [B1 V3 V6]; [B1 V3 V6] # -ྸჅ.-𐹽ݴ𞣑 ++B; \u0659。𑄴︒\u0627\u07DD; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ٙ.𑄴︒اߝ ++B; \u0659。𑄴。\u0627\u07DD; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٙ.𑄴.اߝ ++B; xn--1hb.xn--w80d.xn--mgb09f; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٙ.𑄴.اߝ ++B; xn--1hb.xn--mgb09fp820c08pa; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ٙ.𑄴︒اߝ ++T; Ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # Ⴙظ. ++N; Ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # Ⴙظ. ++T; ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2]; [B5 B6] # ⴙظ. ++N; ⴙ\u0638.󠆓\u200D; [B1 B5 B6 C2]; [B1 B5 B6 C2] # ⴙظ. ++B; xn--3gb910r.; [B5 B6]; [B5 B6] # ⴙظ. ++B; xn--3gb910r.xn--1ug; [B1 B5 B6 C2]; [B1 B5 B6 C2] # ⴙظ. ++B; xn--3gb194c.; [B5 B6 V6]; [B5 B6 V6] # Ⴙظ. ++B; xn--3gb194c.xn--1ug; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # Ⴙظ. 
++B; 󠆸。₆0𐺧\u0756; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # .60ݖ ++B; 󠆸。60𐺧\u0756; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # .60ݖ ++B; .xn--60-cke9470y; [B1 V6 A4_2]; [B1 V6 A4_2] # .60ݖ ++B; 6\u084F。-𑈴; [B1 V3]; [B1 V3] # 6ࡏ.-𑈴 ++B; 6\u084F。-𑈴; [B1 V3]; [B1 V3] # 6ࡏ.-𑈴 ++B; xn--6-jjd.xn----6n8i; [B1 V3]; [B1 V3] # 6ࡏ.-𑈴 ++T; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્ςࣖ ++N; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્ςࣖ ++T; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્ςࣖ ++N; \u200D񋌿𐹰。\u0ACDς𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્ςࣖ ++T; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ ++N; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ ++T; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ ++N; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ ++B; xn--oo0d1330n.xn--4xa21xcwbfz15g; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 𐹰.્σࣖ ++B; xn--1ugx105gq26y.xn--4xa21xcwbfz15g; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹰.્σࣖ ++B; xn--1ugx105gq26y.xn--3xa41xcwbfz15g; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹰.્ςࣖ ++T; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ ++N; \u200D񋌿𐹰。\u0ACDΣ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ ++T; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𐹰.્σࣖ ++N; \u200D񋌿𐹰。\u0ACDσ𞰎\u08D6; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹰.્σࣖ ++B; ⒈񟄜Ⴓ⒪.\u0DCA򘘶\u088B𐹢; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈Ⴓ⒪.්𐹢 ++B; 1.񟄜Ⴓ(o).\u0DCA򘘶\u088B𐹢; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.Ⴓ(o).්𐹢 ++B; 1.񟄜ⴓ(o).\u0DCA򘘶\u088B𐹢; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.ⴓ(o).්𐹢 ++B; 1.񟄜Ⴓ(O).\u0DCA򘘶\u088B𐹢; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.Ⴓ(o).්𐹢 ++B; 1.xn--(o)-7sn88849j.xn--3xb99xpx1yoes3e; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.Ⴓ(o).්𐹢 ++B; 1.xn--(o)-ej1bu5389e.xn--3xb99xpx1yoes3e; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # 1.ⴓ(o).්𐹢 ++B; ⒈񟄜ⴓ⒪.\u0DCA򘘶\u088B𐹢; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈ⴓ⒪.්𐹢 ++B; 
xn--tsh0ds63atl31n.xn--3xb99xpx1yoes3e; [B1 V5 V6]; [B1 V5 V6] # ⒈ⴓ⒪.්𐹢 ++B; xn--rnd762h7cx3027d.xn--3xb99xpx1yoes3e; [B1 V5 V6]; [B1 V5 V6] # ⒈Ⴓ⒪.්𐹢 ++B; 𞤷.𐮐𞢁𐹠\u0624; ; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ ++B; 𞤷.𐮐𞢁𐹠\u0648\u0654; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ ++B; 𞤕.𐮐𞢁𐹠\u0648\u0654; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ ++B; 𞤕.𐮐𞢁𐹠\u0624; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ ++B; xn--ve6h.xn--jgb1694kz0b2176a; 𞤷.𐮐𞢁𐹠\u0624; xn--ve6h.xn--jgb1694kz0b2176a; NV8 # 𞤷.𐮐𞢁𐹠ؤ ++B; 𐲈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] ++B; 𐲈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] ++B; 𐳈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] ++B; xn----ue6i.xn--v80d6662t; [B1 B3 V3 V5 V6]; [B1 B3 V3 V5 V6] ++B; 𐳈-。𑄳񢌻; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] ++B; -󠉖ꡧ.󠊂񇆃🄉; [P1 V3 V6]; [P1 V3 V6] ++B; -󠉖ꡧ.󠊂񇆃8,; [P1 V3 V6]; [P1 V3 V6] ++B; xn----hg4ei0361g.xn--8,-k362evu488a; [P1 V3 V6]; [P1 V3 V6] ++B; xn----hg4ei0361g.xn--207ht163h7m94c; [V3 V6]; [V3 V6] ++B; 󠾛󠈴臯𧔤.\u0768𝟝; [B1 P1 V6]; [B1 P1 V6] # 臯𧔤.ݨ5 ++B; 󠾛󠈴臯𧔤.\u07685; [B1 P1 V6]; [B1 P1 V6] # 臯𧔤.ݨ5 ++B; xn--zb1at733hm579ddhla.xn--5-b5c; [B1 V6]; [B1 V6] # 臯𧔤.ݨ5 ++B; ≮𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] ++B; <\u0338𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] ++B; ≮𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] ++B; <\u0338𐹣.𝨿; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] ++B; xn--gdh1504g.xn--e92h; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] ++B; 𐹯ᯛ\u0A4D。脥; [B1]; [B1] # 𐹯ᯛ੍.脥 ++B; 𐹯ᯛ\u0A4D。脥; [B1]; [B1] # 𐹯ᯛ੍.脥 ++B; xn--ybc101g3m1p.xn--740a; [B1]; [B1] # 𐹯ᯛ੍.脥 ++B; \u1B44\u115F𞷿򃀍.-; [B1 B5 P1 V3 V5 V6]; [B1 B5 P1 V3 V5 V6] # ᭄.- ++B; xn--osd971cpx70btgt8b.-; [B1 B5 V3 V5 V6]; [B1 B5 V3 V5 V6] # ᭄.- ++T; \u200C。\u0354; [C1 V5]; [V5 A4_2] # .͔ ++N; \u200C。\u0354; [C1 V5]; [C1 V5] # .͔ ++T; \u200C。\u0354; [C1 V5]; [V5 A4_2] # .͔ ++N; \u200C。\u0354; [C1 V5]; [C1 V5] # .͔ ++B; .xn--yua; [V5 A4_2]; [V5 A4_2] # .͔ ++B; xn--0ug.xn--yua; [C1 V5]; [C1 V5] # .͔ ++B; 𞤥󠅮.ᡄႮ; 
[P1 V6]; [P1 V6] ++B; 𞤥󠅮.ᡄႮ; [P1 V6]; [P1 V6] ++B; 𞤥󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h ++B; 𞤃󠅮.ᡄႮ; [P1 V6]; [P1 V6] ++B; 𞤃󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h ++B; xn--de6h.xn--37e857h; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h ++B; 𞤥.ᡄⴎ; ; xn--de6h.xn--37e857h ++B; 𞤃.ᡄႮ; [P1 V6]; [P1 V6] ++B; 𞤃.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h ++B; xn--de6h.xn--mnd799a; [V6]; [V6] ++B; 𞤥󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h ++B; 𞤃󠅮.ᡄႮ; [P1 V6]; [P1 V6] ++B; 𞤃󠅮.ᡄⴎ; 𞤥.ᡄⴎ; xn--de6h.xn--37e857h ++B; 𞤥.ᡄႮ; [P1 V6]; [P1 V6] ++B; 𞤧𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] ++B; 𞤧𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] ++B; 𞤧𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] ++B; 𞤅𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] ++B; 𞤅𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] ++B; xn--zxa5691vboja.xn--bfi293ci119b; [B2 B3 B6]; [B2 B3 B6] ++B; 𞤧𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] ++B; 𞤅𝨨Ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] ++B; 𞤅𝨨ξ.𪺏㛨❸; [B2 B3 B6]; [B2 B3 B6] ++T; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ ++N; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ ++T; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ ++N; ᠆몆\u200C-。Ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ ++T; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. ++N; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. ++T; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. ++N; ᠆몆\u200C-。Ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.Ⴛ𐦅. ++T; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅. ++N; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅. ++T; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅. ++N; ᠆몆\u200C-。ⴛ𐦅。; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅. ++B; xn----e3j6620g.xn--jlju661e.; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] ++B; xn----e3j425bsk1o.xn--jlju661e.; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.ⴛ𐦅. 
++B; xn----e3j6620g.xn--znd4948j.; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] ++B; xn----e3j425bsk1o.xn--znd4948j.; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.Ⴛ𐦅. ++T; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ ++N; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ ++T; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ ++N; ᠆몆\u200C-。ⴛ𐦅︒; [B1 B5 B6 C1 P1 V3 V6]; [B1 B5 B6 C1 P1 V3 V6] # ᠆몆-.ⴛ𐦅︒ ++B; xn----e3j6620g.xn--jlj4997dhgh; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] ++B; xn----e3j425bsk1o.xn--jlj4997dhgh; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.ⴛ𐦅︒ ++B; xn----e3j6620g.xn--znd2362jhgh; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] ++B; xn----e3j425bsk1o.xn--znd2362jhgh; [B1 B5 B6 C1 V3 V6]; [B1 B5 B6 C1 V3 V6] # ᠆몆-.Ⴛ𐦅︒ ++T; 󠾳.︒⥱\u200C𐹬; [B1 C1 P1 V6]; [B1 P1 V6] # .︒⥱𐹬 ++N; 󠾳.︒⥱\u200C𐹬; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .︒⥱𐹬 ++T; 󠾳.。⥱\u200C𐹬; [B1 C1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ..⥱𐹬 ++N; 󠾳.。⥱\u200C𐹬; [B1 C1 P1 V6 A4_2]; [B1 C1 P1 V6 A4_2] # ..⥱𐹬 ++B; xn--uf66e..xn--qti2829e; [B1 V6 A4_2]; [B1 V6 A4_2] ++B; xn--uf66e..xn--0ugz28as66q; [B1 C1 V6 A4_2]; [B1 C1 V6 A4_2] # ..⥱𐹬 ++B; xn--uf66e.xn--qtiz073e3ik; [B1 V6]; [B1 V6] ++B; xn--uf66e.xn--0ugz28axl3pqxna; [B1 C1 V6]; [B1 C1 V6] # .︒⥱𐹬 ++B; 𐯖.𐹠Ⴑ񚇜𐫊; [B1 P1 V6]; [B1 P1 V6] ++B; 𐯖.𐹠ⴑ񚇜𐫊; [B1 P1 V6]; [B1 P1 V6] ++B; xn--n49c.xn--8kj8702ewicl862o; [B1 V6]; [B1 V6] ++B; xn--n49c.xn--pnd4619jwicl862o; [B1 V6]; [B1 V6] ++B; \u0FA4񱤯.𝟭Ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1Ⴛ ++B; \u0FA4񱤯.1Ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1Ⴛ ++B; \u0FA4񱤯.1ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1ⴛ ++B; xn--0fd40533g.xn--1-tws; [V5 V6]; [V5 V6] # ྤ.1ⴛ ++B; xn--0fd40533g.xn--1-q1g; [V5 V6]; [V5 V6] # ྤ.1Ⴛ ++B; \u0FA4񱤯.𝟭ⴛ; [P1 V5 V6]; [P1 V5 V6] # ྤ.1ⴛ ++B; -\u0826齀。릿𐸋; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -ࠦ齀.릿 ++B; -\u0826齀。릿𐸋; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -ࠦ齀.릿 ++B; xn----6gd0617i.xn--7y2bm55m; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # -ࠦ齀.릿 ++T; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 
B6 P1 V6] # ܜ鹝꾗.⏃ ++N; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ܜ鹝꾗.⏃ ++T; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 B6 P1 V6] # ܜ鹝꾗.⏃ ++N; 󠔊\u071C鹝꾗。񾵐\u200D\u200D⏃; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ܜ鹝꾗.⏃ ++B; xn--mnb6558e91kyq533a.xn--6mh27269e; [B1 B6 V6]; [B1 B6 V6] # ܜ鹝꾗.⏃ ++B; xn--mnb6558e91kyq533a.xn--1uga46zs309y; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ܜ鹝꾗.⏃ ++B; ≮.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- ++B; <\u0338.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- ++B; ≮.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- ++B; <\u0338.-\u0708--; [B1 P1 V2 V3 V6]; [B1 P1 V2 V3 V6] # ≮.-܈-- ++B; xn--gdh.xn------eqf; [B1 V2 V3 V6]; [B1 V2 V3 V6] # ≮.-܈-- ++T; 𐹸󠋳。\u200Dς𝟩; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.ς7 ++N; 𐹸󠋳。\u200Dς𝟩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.ς7 ++T; 𐹸󠋳。\u200Dς7; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.ς7 ++N; 𐹸󠋳。\u200Dς7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.ς7 ++T; 𐹸󠋳。\u200DΣ7; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 ++N; 𐹸󠋳。\u200DΣ7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 ++T; 𐹸󠋳。\u200Dσ7; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 ++N; 𐹸󠋳。\u200Dσ7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 ++B; xn--wo0di5177c.xn--7-zmb; [B1 V6]; [B1 V6] ++B; xn--wo0di5177c.xn--7-zmb938s; [B1 C2 V6]; [B1 C2 V6] # 𐹸.σ7 ++B; xn--wo0di5177c.xn--7-xmb248s; [B1 C2 V6]; [B1 C2 V6] # 𐹸.ς7 ++T; 𐹸󠋳。\u200DΣ𝟩; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 ++N; 𐹸󠋳。\u200DΣ𝟩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 ++T; 𐹸󠋳。\u200Dσ𝟩; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹸.σ7 ++N; 𐹸󠋳。\u200Dσ𝟩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹸.σ7 ++T; ς򅜌8.𞭤; [P1 V6]; [P1 V6] ++N; ς򅜌8.𞭤; [P1 V6]; [P1 V6] ++T; ς򅜌8.𞭤; [P1 V6]; [P1 V6] ++N; ς򅜌8.𞭤; [P1 V6]; [P1 V6] ++B; Σ򅜌8.𞭤; [P1 V6]; [P1 V6] ++B; σ򅜌8.𞭤; [P1 V6]; [P1 V6] ++B; xn--8-zmb14974n.xn--su6h; [V6]; [V6] ++B; xn--8-xmb44974n.xn--su6h; [V6]; [V6] ++B; Σ򅜌8.𞭤; [P1 V6]; [P1 V6] ++B; σ򅜌8.𞭤; [P1 V6]; [P1 V6] ++T; \u200Cᡑ🄀\u0684.-𐫄𑲤; [B1 C1 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 ++N; \u200Cᡑ🄀\u0684.-𐫄𑲤; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 ++T; 
\u200Cᡑ0.\u0684.-𐫄𑲤; [B1 C1 V3]; [B1 V3] # ᡑ0.ڄ.-𐫄𑲤 ++N; \u200Cᡑ0.\u0684.-𐫄𑲤; [B1 C1 V3]; [B1 C1 V3] # ᡑ0.ڄ.-𐫄𑲤 ++B; xn--0-o7j.xn--9ib.xn----ek5i065b; [B1 V3]; [B1 V3] # ᡑ0.ڄ.-𐫄𑲤 ++B; xn--0-o7j263b.xn--9ib.xn----ek5i065b; [B1 C1 V3]; [B1 C1 V3] # ᡑ0.ڄ.-𐫄𑲤 ++B; xn--9ib722gbw95a.xn----ek5i065b; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 ++B; xn--9ib722gvtfi563c.xn----ek5i065b; [B1 C1 V3 V6]; [B1 C1 V3 V6] # ᡑ🄀ڄ.-𐫄𑲤 ++B; 𖠍。𐪿넯򞵲; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 𖠍。𐪿넯򞵲; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; xn--4e9e.xn--l60bj21opd57g; [B2 B3 V6]; [B2 B3 V6] ++B; ᠇Ⴘ。\u0603Ⴈ𝆊; [B1 P1 V6]; [B1 P1 V6] # ᠇Ⴘ.Ⴈ𝆊 ++B; ᠇ⴘ。\u0603ⴈ𝆊; [B1 P1 V6]; [B1 P1 V6] # ᠇ⴘ.ⴈ𝆊 ++B; xn--d6e009h.xn--lfb290rfu3z; [B1 V6]; [B1 V6] # ᠇ⴘ.ⴈ𝆊 ++B; xn--wnd558a.xn--lfb465c1v87a; [B1 V6]; [B1 V6] # ᠇Ⴘ.Ⴈ𝆊 ++B; ⒚󠋑𞤰。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ⒚𞤰.牣٧Ⴜᣥ ++B; 19.󠋑𞤰。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 19.𞤰.牣٧Ⴜᣥ ++B; 19.󠋑𞤰。牣\u0667ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 19.𞤰.牣٧ⴜᣥ ++B; 19.󠋑𞤎。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 19.𞤰.牣٧Ⴜᣥ ++B; 19.xn--oe6h75760c.xn--gib404ccxgh00h; [B1 B5 V6]; [B1 B5 V6] # 19.𞤰.牣٧Ⴜᣥ ++B; 19.xn--oe6h75760c.xn--gib285gtxo2l9d; [B1 B5 V6]; [B1 B5 V6] # 19.𞤰.牣٧ⴜᣥ ++B; ⒚󠋑𞤰。牣\u0667ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ⒚𞤰.牣٧ⴜᣥ ++B; ⒚󠋑𞤎。牣\u0667Ⴜᣥ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ⒚𞤰.牣٧Ⴜᣥ ++B; xn--cthy466n29j3e.xn--gib404ccxgh00h; [B1 B5 V6]; [B1 B5 V6] # ⒚𞤰.牣٧Ⴜᣥ ++B; xn--cthy466n29j3e.xn--gib285gtxo2l9d; [B1 B5 V6]; [B1 B5 V6] # ⒚𞤰.牣٧ⴜᣥ ++B; -𐋱𐰽⒈.Ⴓ; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; -𐋱𐰽1..Ⴓ; [B1 P1 V3 V6 A4_2]; [B1 P1 V3 V6 A4_2] ++B; -𐋱𐰽1..ⴓ; [B1 V3 A4_2]; [B1 V3 A4_2] ++B; xn---1-895nq11a..xn--blj; [B1 V3 A4_2]; [B1 V3 A4_2] ++B; xn---1-895nq11a..xn--rnd; [B1 V3 V6 A4_2]; [B1 V3 V6 A4_2] ++B; -𐋱𐰽⒈.ⴓ; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; xn----ecp0206g90h.xn--blj; [B1 V3 V6]; [B1 V3 V6] ++B; xn----ecp0206g90h.xn--rnd; [B1 V3 V6]; [B1 V3 V6] ++T; \u200C긃.榶-; [C1 V3]; [V3] # 긃.榶- ++N; \u200C긃.榶-; [C1 V3]; [C1 V3] # 긃.榶- ++T; \u200C긃.榶-; [C1 V3]; [V3] # 긃.榶- ++N; \u200C긃.榶-; [C1 
V3]; [C1 V3] # 긃.榶- ++B; xn--ej0b.xn----d87b; [V3]; [V3] ++B; xn--0ug3307c.xn----d87b; [C1 V3]; [C1 V3] # 긃.榶- ++T; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ ++N; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ ++T; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ ++N; 뉓泓𜵽.\u09CD\u200D; [P1 V5 V6]; [P1 V5 V6] # 뉓泓.্ ++B; xn--lwwp69lqs7m.xn--b7b; [V5 V6]; [V5 V6] # 뉓泓.্ ++B; xn--lwwp69lqs7m.xn--b7b605i; [V5 V6]; [V5 V6] # 뉓泓.্ ++T; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ß.ິ ++N; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ß.ິ ++T; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ß.ິ ++N; \u200D𐹴ß。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ß.ິ ++T; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ ++N; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ ++T; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ ++N; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ ++T; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ ++N; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ ++B; xn--ss-ti3o.xn--57c638l8774i; [B1 V5 V6]; [B1 V5 V6] # 𐹴ss.ິ ++B; xn--ss-l1t5169j.xn--57c638l8774i; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹴ss.ິ ++B; xn--zca770nip7n.xn--57c638l8774i; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𐹴ß.ິ ++T; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ ++N; \u200D𐹴SS。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ ++T; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ ++N; \u200D𐹴ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ ++T; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𐹴ss.ິ ++N; \u200D𐹴Ss。\u0EB4\u2B75񪅌; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𐹴ss.ິ ++B; \u1B44.\u1BAA-≮≠; [P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ ++B; \u1B44.\u1BAA-<\u0338=\u0338; [P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ ++B; \u1B44.\u1BAA-≮≠; [P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ ++B; \u1B44.\u1BAA-<\u0338=\u0338; 
[P1 V5 V6]; [P1 V5 V6] # ᭄.᮪-≮≠ ++B; xn--1uf.xn----nmlz65aub; [V5 V6]; [V5 V6] # ᭄.᮪-≮≠ ++B; \u1BF3Ⴑ\u115F.𑄴Ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴Ⅎ ++B; \u1BF3Ⴑ\u115F.𑄴Ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴Ⅎ ++B; \u1BF3ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳ⴑ.𑄴ⅎ ++B; \u1BF3Ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴ⅎ ++B; xn--pnd26a55x.xn--73g3065g; [V5 V6]; [V5 V6] # ᯳Ⴑ.𑄴ⅎ ++B; xn--osd925cvyn.xn--73g3065g; [V5 V6]; [V5 V6] # ᯳ⴑ.𑄴ⅎ ++B; xn--pnd26a55x.xn--f3g7465g; [V5 V6]; [V5 V6] # ᯳Ⴑ.𑄴Ⅎ ++B; \u1BF3ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳ⴑ.𑄴ⅎ ++B; \u1BF3Ⴑ\u115F.𑄴ⅎ; [P1 V5 V6]; [P1 V5 V6] # ᯳Ⴑ.𑄴ⅎ ++B; 𜉆。Ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] ++B; 𜉆。Ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] ++B; 𜉆。ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] ++B; 𜉆。ⴃ𐴣𐹹똯; [B5 P1 V6]; [B5 P1 V6] ++B; xn--187g.xn--ukjy205b8rscdeb; [B5 V6]; [B5 V6] ++B; xn--187g.xn--bnd4785f8r8bdeb; [B5 V6]; [B5 V6] ++B; 𐫀。⳻󠙾󠄷\u3164; [B1 P1 V6]; [B1 P1 V6] # 𐫀.⳻ ++B; 𐫀。⳻󠙾󠄷\u1160; [B1 P1 V6]; [B1 P1 V6] # 𐫀.⳻ ++B; xn--pw9c.xn--psd742lxt32w; [B1 V6]; [B1 V6] # 𐫀.⳻ ++B; xn--pw9c.xn--mkj83l4v899a; [B1 V6]; [B1 V6] # 𐫀.⳻ ++B; \u079A⾇.\u071E-𐋰; [B2 B3]; [B2 B3] # ޚ舛.ܞ-𐋰 ++B; \u079A舛.\u071E-𐋰; [B2 B3]; [B2 B3] # ޚ舛.ܞ-𐋰 ++B; xn--7qb6383d.xn----20c3154q; [B2 B3]; [B2 B3] # ޚ舛.ܞ-𐋰 ++B; Ⴉ猕󹛫≮.︒; [P1 V6]; [P1 V6] ++B; Ⴉ猕󹛫<\u0338.︒; [P1 V6]; [P1 V6] ++B; Ⴉ猕󹛫≮.。; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; Ⴉ猕󹛫<\u0338.。; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; ⴉ猕󹛫<\u0338.。; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; ⴉ猕󹛫≮.。; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; xn--gdh892bbz0d5438s..; [V6 A4_2]; [V6 A4_2] ++B; xn--hnd212gz32d54x5r..; [V6 A4_2]; [V6 A4_2] ++B; ⴉ猕󹛫<\u0338.︒; [P1 V6]; [P1 V6] ++B; ⴉ猕󹛫≮.︒; [P1 V6]; [P1 V6] ++B; xn--gdh892bbz0d5438s.xn--y86c; [V6]; [V6] ++B; xn--hnd212gz32d54x5r.xn--y86c; [V6]; [V6] ++B; 🏮。\u062B鳳\u07E2󠅉; [B1 B2]; [B1 B2] # 🏮.ث鳳ߢ ++B; 🏮。\u062B鳳\u07E2󠅉; [B1 B2]; [B1 B2] # 🏮.ث鳳ߢ ++B; xn--8m8h.xn--qgb29f6z90a; [B1 B2]; [B1 B2] # 🏮.ث鳳ߢ ++T; \u200D𐹶。ß; [B1 C2]; [B1] # 𐹶.ß ++N; \u200D𐹶。ß; [B1 C2]; [B1 C2] # 𐹶.ß ++T; \u200D𐹶。SS; [B1 C2]; [B1] # 𐹶.ss ++N; \u200D𐹶。SS; [B1 C2]; [B1 C2] # 
𐹶.ss ++T; \u200D𐹶。ss; [B1 C2]; [B1] # 𐹶.ss ++N; \u200D𐹶。ss; [B1 C2]; [B1 C2] # 𐹶.ss ++T; \u200D𐹶。Ss; [B1 C2]; [B1] # 𐹶.ss ++N; \u200D𐹶。Ss; [B1 C2]; [B1 C2] # 𐹶.ss ++B; xn--uo0d.ss; [B1]; [B1] ++B; xn--1ug9105g.ss; [B1 C2]; [B1 C2] # 𐹶.ss ++B; xn--1ug9105g.xn--zca; [B1 C2]; [B1 C2] # 𐹶.ß ++T; Å둄-.\u200C; [C1 V3]; [V3] # å둄-. ++N; Å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. ++T; A\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. ++N; A\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. ++T; Å둄-.\u200C; [C1 V3]; [V3] # å둄-. ++N; Å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. ++T; A\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. ++N; A\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. ++T; a\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. ++N; a\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. ++T; å둄-.\u200C; [C1 V3]; [V3] # å둄-. ++N; å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. ++B; xn----1fa1788k.; [V3]; [V3] ++B; xn----1fa1788k.xn--0ug; [C1 V3]; [C1 V3] # å둄-. ++T; a\u030A둄-.\u200C; [C1 V3]; [V3] # å둄-. ++N; a\u030A둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. ++T; å둄-.\u200C; [C1 V3]; [V3] # å둄-. ++N; å둄-.\u200C; [C1 V3]; [C1 V3] # å둄-. 
++B; \u3099򬎑\u1DD7𞤀.򱲢-\u0953; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # ゙ᷗ𞤢.-॓ ++B; \u3099򬎑\u1DD7𞤢.򱲢-\u0953; [B1 B6 P1 V5 V6]; [B1 B6 P1 V5 V6] # ゙ᷗ𞤢.-॓ ++B; xn--veg121fwg63altj9d.xn----eyd92688s; [B1 B6 V5 V6]; [B1 B6 V5 V6] # ゙ᷗ𞤢.-॓ ++T; ς.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ς.ß⵿ ++N; ς.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ς.ß⵿ ++B; Σ.SS񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ss⵿ ++B; σ.ss񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ss⵿ ++B; Σ.ss񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ss⵿ ++B; xn--4xa.xn--ss-y8d4760biv60n; [B5 B6 V6]; [B5 B6 V6] # σ.ss⵿ ++T; Σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ ++N; Σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ ++T; σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ ++N; σ.ß񴱄\u06DD\u2D7F; [B5 B6 P1 V6]; [B5 B6 P1 V6] # σ.ß⵿ ++B; xn--4xa.xn--zca281az71b8x73m; [B5 B6 V6]; [B5 B6 V6] # σ.ß⵿ ++B; xn--3xa.xn--zca281az71b8x73m; [B5 B6 V6]; [B5 B6 V6] # ς.ß⵿ ++B; ꡀ𞀟。\u066B\u0599; [B1]; [B1] # ꡀ𞀟.٫֙ ++B; ꡀ𞀟。\u066B\u0599; [B1]; [B1] # ꡀ𞀟.٫֙ ++B; xn--8b9a1720d.xn--kcb33b; [B1]; [B1] # ꡀ𞀟.٫֙ ++T; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # ࢩ.⧅-𐭡 ++N; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ࢩ.⧅-𐭡 ++T; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # ࢩ.⧅-𐭡 ++N; 򈛉\u200C\u08A9。⧅񘘡-𐭡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ࢩ.⧅-𐭡 ++B; xn--yyb56242i.xn----zir1232guu71b; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ࢩ.⧅-𐭡 ++B; xn--yyb780jll63m.xn----zir1232guu71b; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # ࢩ.⧅-𐭡 ++T; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖︒ ++N; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖︒ ++T; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖︒ ++N; 룱\u200D𰍨\u200C。𝨖︒; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖︒ ++T; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖. ++N; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖. ++T; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [P1 V5 V6] # 룱.𝨖. 
++N; 룱\u200D𰍨\u200C。𝨖。; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # 룱.𝨖. ++B; xn--ct2b0738h.xn--772h.; [V5 V6]; [V5 V6] ++B; xn--0ugb3358ili2v.xn--772h.; [C1 C2 V5 V6]; [C1 C2 V5 V6] # 룱.𝨖. ++B; xn--ct2b0738h.xn--y86cl899a; [V5 V6]; [V5 V6] ++B; xn--0ugb3358ili2v.xn--y86cl899a; [C1 C2 V5 V6]; [C1 C2 V5 V6] # 룱.𝨖︒ ++T; 🄄.\u1CDC⒈ß; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ß ++N; 🄄.\u1CDC⒈ß; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ß ++T; 3,.\u1CDC1.ß; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ß ++N; 3,.\u1CDC1.ß; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ß ++B; 3,.\u1CDC1.SS; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss ++B; 3,.\u1CDC1.ss; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss ++B; 3,.\u1CDC1.Ss; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss ++B; 3,.xn--1-43l.ss; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ss ++B; 3,.xn--1-43l.xn--zca; [P1 V5 V6]; [P1 V5 V6] # 3,.᳜1.ß ++B; 🄄.\u1CDC⒈SS; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ss ++B; 🄄.\u1CDC⒈ss; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ss ++B; 🄄.\u1CDC⒈Ss; [P1 V5 V6]; [P1 V5 V6] # 🄄.᳜⒈ss ++B; xn--x07h.xn--ss-k1r094b; [V5 V6]; [V5 V6] # 🄄.᳜⒈ss ++B; xn--x07h.xn--zca344lmif; [V5 V6]; [V5 V6] # 🄄.᳜⒈ß ++B; 񇌍\u2D7F。𞼓򡄨𑐺; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⵿.𑐺 ++B; 񇌍\u2D7F。𞼓򡄨𑐺; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⵿.𑐺 ++B; xn--eoj16016a.xn--0v1d3848a3lr0d; [B2 B3 V6]; [B2 B3 V6] # ⵿.𑐺 ++T; \u1DFD\u103A\u094D.≠\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ ++N; \u1DFD\u103A\u094D.≠\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ ++T; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ ++N; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ ++T; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ ++N; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ ++T; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ ++N; \u103A\u094D\u1DFD.≠\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ ++T; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [P1 V5 V6] # ်्᷽.≠㇛ ++N; \u103A\u094D\u1DFD.=\u0338\u200D㇛; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ်्᷽.≠㇛ ++B; xn--n3b956a9zm.xn--1ch912d; [V5 V6]; [V5 V6] 
# ်्᷽.≠㇛ ++B; xn--n3b956a9zm.xn--1ug63gz5w; [C2 V5 V6]; [C2 V5 V6] # ်्᷽.≠㇛ ++T; Ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2 P1 V6]; [B1 P1 V5 V6] # Ⴁ𐋨娤.̼٢𑖿 ++N; Ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴁ𐋨娤.̼٢𑖿 ++T; ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2]; [B1 V5] # ⴁ𐋨娤.̼٢𑖿 ++N; ⴁ𐋨娤.\u200D\u033C\u0662𑖿; [B1 C2]; [B1 C2] # ⴁ𐋨娤.̼٢𑖿 ++B; xn--skjw75lg29h.xn--9ta62nrv36a; [B1 V5]; [B1 V5] # ⴁ𐋨娤.̼٢𑖿 ++B; xn--skjw75lg29h.xn--9ta62ngt6aou8t; [B1 C2]; [B1 C2] # ⴁ𐋨娤.̼٢𑖿 ++B; xn--8md2578ag21g.xn--9ta62nrv36a; [B1 V5 V6]; [B1 V5 V6] # Ⴁ𐋨娤.̼٢𑖿 ++B; xn--8md2578ag21g.xn--9ta62ngt6aou8t; [B1 C2 V6]; [B1 C2 V6] # Ⴁ𐋨娤.̼٢𑖿 ++T; 🄀Ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶß ++N; 🄀Ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶß ++T; 0.Ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶß ++N; 0.Ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶß ++T; 0.ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶß ++N; 0.ⴄ\u0669\u0820。1.\u0FB6ß; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶß ++B; 0.Ⴄ\u0669\u0820。1.\u0FB6SS; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶss ++B; 0.ⴄ\u0669\u0820。1.\u0FB6ss; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶss ++B; 0.Ⴄ\u0669\u0820。1.\u0FB6Ss; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶss ++B; 0.xn--iib29f26o.1.xn--ss-1sj; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶss ++B; 0.xn--iib29fp25e.1.xn--ss-1sj; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶss ++B; 0.xn--iib29fp25e.1.xn--zca117e; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 0.ⴄ٩ࠠ.1.ྶß ++B; 0.xn--iib29f26o.1.xn--zca117e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 0.Ⴄ٩ࠠ.1.ྶß ++T; 🄀ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀ⴄ٩ࠠ.⒈ྶß ++N; 🄀ⴄ\u0669\u0820。⒈\u0FB6ß; [B1 P1 V6]; [B1 P1 V6] # 🄀ⴄ٩ࠠ.⒈ྶß ++B; 🄀Ⴄ\u0669\u0820。⒈\u0FB6SS; [B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶss ++B; 🄀ⴄ\u0669\u0820。⒈\u0FB6ss; [B1 P1 V6]; [B1 P1 V6] # 🄀ⴄ٩ࠠ.⒈ྶss ++B; 🄀Ⴄ\u0669\u0820。⒈\u0FB6Ss; [B1 P1 V6]; [B1 P1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶss ++B; xn--iib29f26o6n43c.xn--ss-1sj588o; 
[B1 V6]; [B1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶss ++B; xn--iib29fp25e0219a.xn--ss-1sj588o; [B1 V6]; [B1 V6] # 🄀ⴄ٩ࠠ.⒈ྶss ++B; xn--iib29fp25e0219a.xn--zca117e3vp; [B1 V6]; [B1 V6] # 🄀ⴄ٩ࠠ.⒈ྶß ++B; xn--iib29f26o6n43c.xn--zca117e3vp; [B1 V6]; [B1 V6] # 🄀Ⴄ٩ࠠ.⒈ྶß ++T; ≠.\u200C-\u066B; [B1 C1 P1 V6]; [B1 P1 V3 V6] # ≠.-٫ ++N; ≠.\u200C-\u066B; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.-٫ ++T; =\u0338.\u200C-\u066B; [B1 C1 P1 V6]; [B1 P1 V3 V6] # ≠.-٫ ++N; =\u0338.\u200C-\u066B; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.-٫ ++B; xn--1ch.xn----vqc; [B1 V3 V6]; [B1 V3 V6] # ≠.-٫ ++B; xn--1ch.xn----vqc597q; [B1 C1 V6]; [B1 C1 V6] # ≠.-٫ ++B; \u0660۱。󠳶𞠁\u0665; [B1 P1 V6]; [B1 P1 V6] # ٠۱.𞠁٥ ++B; \u0660۱。󠳶𞠁\u0665; [B1 P1 V6]; [B1 P1 V6] # ٠۱.𞠁٥ ++B; xn--8hb40a.xn--eib7967vner3e; [B1 V6]; [B1 V6] # ٠۱.𞠁٥ ++T; \u200C\u0663⒖。󱅉𽷛\u1BF3; [B1 C1 P1 V6]; [B1 P1 V6] # ٣⒖.᯳ ++N; \u200C\u0663⒖。󱅉𽷛\u1BF3; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ٣⒖.᯳ ++T; \u200C\u066315.。󱅉𽷛\u1BF3; [B1 C1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ٣15..᯳ ++N; \u200C\u066315.。󱅉𽷛\u1BF3; [B1 C1 P1 V6 A4_2]; [B1 C1 P1 V6 A4_2] # ٣15..᯳ ++B; xn--15-gyd..xn--1zf13512buy41d; [B1 V6 A4_2]; [B1 V6 A4_2] # ٣15..᯳ ++B; xn--15-gyd983x..xn--1zf13512buy41d; [B1 C1 V6 A4_2]; [B1 C1 V6 A4_2] # ٣15..᯳ ++B; xn--cib675m.xn--1zf13512buy41d; [B1 V6]; [B1 V6] # ٣⒖.᯳ ++B; xn--cib152kwgd.xn--1zf13512buy41d; [B1 C1 V6]; [B1 C1 V6] # ٣⒖.᯳ ++B; \u1BF3.-逋񳦭󙙮; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ᯳.-逋 ++B; xn--1zf.xn----483d46987byr50b; [V3 V5 V6]; [V3 V5 V6] # ᯳.-逋 ++T; \u0756。\u3164\u200Dς; [C2 P1 V6]; [P1 V6] # ݖ.ς ++N; \u0756。\u3164\u200Dς; [C2 P1 V6]; [C2 P1 V6] # ݖ.ς ++T; \u0756。\u1160\u200Dς; [C2 P1 V6]; [P1 V6] # ݖ.ς ++N; \u0756。\u1160\u200Dς; [C2 P1 V6]; [C2 P1 V6] # ݖ.ς ++T; \u0756。\u1160\u200DΣ; [C2 P1 V6]; [P1 V6] # ݖ.σ ++N; \u0756。\u1160\u200DΣ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ ++T; \u0756。\u1160\u200Dσ; [C2 P1 V6]; [P1 V6] # ݖ.σ ++N; \u0756。\u1160\u200Dσ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ ++B; xn--9ob.xn--4xa380e; [V6]; [V6] # ݖ.σ ++B; xn--9ob.xn--4xa380ebol; [C2 V6]; [C2 V6] # ݖ.σ ++B; 
xn--9ob.xn--3xa580ebol; [C2 V6]; [C2 V6] # ݖ.ς ++T; \u0756。\u3164\u200DΣ; [C2 P1 V6]; [P1 V6] # ݖ.σ ++N; \u0756。\u3164\u200DΣ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ ++T; \u0756。\u3164\u200Dσ; [C2 P1 V6]; [P1 V6] # ݖ.σ ++N; \u0756。\u3164\u200Dσ; [C2 P1 V6]; [C2 P1 V6] # ݖ.σ ++B; xn--9ob.xn--4xa574u; [V6]; [V6] # ݖ.σ ++B; xn--9ob.xn--4xa795lq2l; [C2 V6]; [C2 V6] # ݖ.σ ++B; xn--9ob.xn--3xa995lq2l; [C2 V6]; [C2 V6] # ݖ.ς ++T; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆႣ.̕ ++N; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆႣ.̕ ++T; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆႣ.̕ ++N; ᡆႣ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆႣ.̕ ++T; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆⴃ.̕ ++N; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆⴃ.̕ ++B; xn--57e237h.xn--5sa98523p; [V6]; [V6] # ᡆⴃ.̕ ++B; xn--57e237h.xn--5sa649la993427a; [C2 V6]; [C2 V6] # ᡆⴃ.̕ ++B; xn--bnd320b.xn--5sa98523p; [V6]; [V6] # ᡆႣ.̕ ++B; xn--bnd320b.xn--5sa649la993427a; [C2 V6]; [C2 V6] # ᡆႣ.̕ ++T; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [P1 V6] # ᡆⴃ.̕ ++N; ᡆⴃ。󞢧\u0315\u200D\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡆⴃ.̕ ++T; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.ς𐮮 ++N; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.ς𐮮 ++T; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.ς𐮮 ++N; 㭄\u200D\u084F𑚵.ς𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.ς𐮮 ++T; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 ++N; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 ++T; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 ++N; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 ++B; xn--ewb302xhu1l.xn--4xa0426k; [B5 B6]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 ++B; xn--ewb962jfitku4r.xn--4xa695lda6932v; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 ++B; xn--ewb962jfitku4r.xn--3xa895lda6932v; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.ς𐮮 ++T; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 ++N; 㭄\u200D\u084F𑚵.Σ𐮮\u200C\u200D; [B5 B6 
C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 ++T; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6] # 㭄ࡏ𑚵.σ𐮮 ++N; 㭄\u200D\u084F𑚵.σ𐮮\u200C\u200D; [B5 B6 C1 C2]; [B5 B6 C1 C2] # 㭄ࡏ𑚵.σ𐮮 ++B; \u17B5。𞯸ꡀ🄋; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # .ꡀ🄋 ++B; xn--03e.xn--8b9ar252dngd; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # .ꡀ🄋 ++B; 󐪺暑.⾑\u0668; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 暑.襾٨ ++B; 󐪺暑.襾\u0668; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 暑.襾٨ ++B; xn--tlvq3513e.xn--hib9228d; [B5 B6 V6]; [B5 B6 V6] # 暑.襾٨ ++B; 󠄚≯ꡢ。\u0891\u1DFF; [B1 P1 V6]; [B1 P1 V6] # ≯ꡢ.᷿ ++B; 󠄚>\u0338ꡢ。\u0891\u1DFF; [B1 P1 V6]; [B1 P1 V6] # ≯ꡢ.᷿ ++B; xn--hdh7783c.xn--9xb680i; [B1 V6]; [B1 V6] # ≯ꡢ.᷿ ++B; \uFDC3𮁱\u0B4D𐨿.󐧤Ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.Ⴗ ++B; \u0643\u0645\u0645𮁱\u0B4D𐨿.󐧤Ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.Ⴗ ++B; \u0643\u0645\u0645𮁱\u0B4D𐨿.󐧤ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.ⴗ ++B; xn--fhbea662czx68a2tju.xn--fljz2846h; [B2 B3 V6]; [B2 B3 V6] # كمم𮁱୍𐨿.ⴗ ++B; xn--fhbea662czx68a2tju.xn--vnd55511o; [B2 B3 V6]; [B2 B3 V6] # كمم𮁱୍𐨿.Ⴗ ++B; \uFDC3𮁱\u0B4D𐨿.󐧤ⴗ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # كمم𮁱୍𐨿.ⴗ ++B; 𞀨。\u1B44򡛨𞎇; [P1 V5 V6]; [P1 V5 V6] # 𞀨.᭄ ++B; 𞀨。\u1B44򡛨𞎇; [P1 V5 V6]; [P1 V5 V6] # 𞀨.᭄ ++B; xn--mi4h.xn--1uf6843smg20c; [V5 V6]; [V5 V6] # 𞀨.᭄ ++T; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # .ᡟ ++N; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .ᡟ ++T; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # .ᡟ ++N; 󠣼\u200C.𐺰\u200Cᡟ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .ᡟ ++B; xn--q046e.xn--v8e7227j; [B1 B2 B3 V6]; [B1 B2 B3 V6] ++B; xn--0ug18531l.xn--v8e340bp21t; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .ᡟ ++T; ᢛ󨅟ß.ጧ; [P1 V6]; [P1 V6] ++N; ᢛ󨅟ß.ጧ; [P1 V6]; [P1 V6] ++B; ᢛ󨅟SS.ጧ; [P1 V6]; [P1 V6] ++B; ᢛ󨅟ss.ጧ; [P1 V6]; [P1 V6] ++B; ᢛ󨅟Ss.ጧ; [P1 V6]; [P1 V6] ++B; xn--ss-7dp66033t.xn--p5d; [V6]; [V6] ++B; xn--zca562jc642x.xn--p5d; [V6]; [V6] ++T; ⮒\u200C.񒚗\u200C; [C1 P1 V6]; [P1 V6] # ⮒. ++N; ⮒\u200C.񒚗\u200C; [C1 P1 V6]; [C1 P1 V6] # ⮒. 
++B; xn--b9i.xn--5p9y; [V6]; [V6] ++B; xn--0ugx66b.xn--0ugz2871c; [C1 V6]; [C1 V6] # ⮒. ++B; 𞤂񹞁𐹯。Ⴜ; [B2 P1 V6]; [B2 P1 V6] ++B; 𞤤񹞁𐹯。ⴜ; [B2 P1 V6]; [B2 P1 V6] ++B; xn--no0dr648a51o3b.xn--klj; [B2 V6]; [B2 V6] ++B; xn--no0dr648a51o3b.xn--0nd; [B2 V6]; [B2 V6] ++B; 𞤂񹞁𐹯。ⴜ; [B2 P1 V6]; [B2 P1 V6] ++T; 𐹵⮣\u200C𑄰。񷴿\uFCB7; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹵⮣𑄰.ضم ++N; 𐹵⮣\u200C𑄰。񷴿\uFCB7; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹵⮣𑄰.ضم ++T; 𐹵⮣\u200C𑄰。񷴿\u0636\u0645; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹵⮣𑄰.ضم ++N; 𐹵⮣\u200C𑄰。񷴿\u0636\u0645; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹵⮣𑄰.ضم ++B; xn--s9i5458e7yb.xn--1gb4a66004i; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 𐹵⮣𑄰.ضم ++B; xn--0ug586bcj8p7jc.xn--1gb4a66004i; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐹵⮣𑄰.ضم ++T; Ⴒ。デß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ ++N; Ⴒ。デß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ ++T; Ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ ++N; Ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デß𞤵్ ++T; ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ ++N; ⴒ。テ\u3099ß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ ++T; ⴒ。デß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ ++N; ⴒ。デß𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ ++B; Ⴒ。デSS𞤓\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ ++B; Ⴒ。テ\u3099SS𞤓\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ ++B; ⴒ。テ\u3099ss𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デss𞤵్ ++B; ⴒ。デss𞤵\u0C4D; [B5 B6]; [B5 B6] # ⴒ.デss𞤵్ ++B; Ⴒ。デSs𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ ++B; Ⴒ。テ\u3099Ss𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ ++B; xn--qnd.xn--ss-9nh3648ahh20b; [B5 B6 V6]; [B5 B6 V6] # Ⴒ.デss𞤵్ ++B; xn--9kj.xn--ss-9nh3648ahh20b; [B5 B6]; [B5 B6] # ⴒ.デss𞤵్ ++B; xn--9kj.xn--zca669cmr3a0f28a; [B5 B6]; [B5 B6] # ⴒ.デß𞤵్ ++B; xn--qnd.xn--zca669cmr3a0f28a; [B5 B6 V6]; [B5 B6 V6] # Ⴒ.デß𞤵్ ++B; Ⴒ。デSS𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ ++B; Ⴒ。テ\u3099SS𞤵\u0C4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴒ.デss𞤵్ ++B; 𑁿\u0D4D.7-\u07D2; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𑁿്.7-ߒ ++B; 𑁿\u0D4D.7-\u07D2; [B1 B3 B6 
V5]; [B1 B3 B6 V5] # 𑁿്.7-ߒ ++B; xn--wxc1283k.xn--7--yue; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𑁿്.7-ߒ ++B; ≯𑜫󠭇.\u1734񒞤𑍬ᢧ; [P1 V5 V6]; [P1 V5 V6] # ≯𑜫.᜴𑍬ᢧ ++B; >\u0338𑜫󠭇.\u1734񒞤𑍬ᢧ; [P1 V5 V6]; [P1 V5 V6] # ≯𑜫.᜴𑍬ᢧ ++B; xn--hdhx157g68o0g.xn--c0e65eu616c34o7a; [V5 V6]; [V5 V6] # ≯𑜫.᜴𑍬ᢧ ++B; \u1DDB򎐙Ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛႷ쏔.ށ ++B; \u1DDB򎐙Ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛႷ쏔.ށ ++B; \u1DDB򎐙ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛⴗ쏔.ށ ++B; \u1DDB򎐙ⴗ쏔。\u0781; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᷛⴗ쏔.ށ ++B; xn--zegy26dw47iy6w2f.xn--iqb; [B1 V5 V6]; [B1 V5 V6] # ᷛⴗ쏔.ށ ++B; xn--vnd148d733ky6n9e.xn--iqb; [B1 V5 V6]; [B1 V5 V6] # ᷛႷ쏔.ށ ++T; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ ++N; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ ++T; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ ++N; ß。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ß.𐋳Ⴌྸ ++T; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ ++N; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; xn--zca.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ ++B; SS。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ ++B; ss。𐋳ⴌ\u0FB8; ss.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ ++B; Ss。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ ++B; ss.xn--lgd10cu829c; [V6]; [V6] # ss.𐋳Ⴌྸ ++B; ss.xn--lgd921mvv0m; ss.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ ++B; ss.𐋳ⴌ\u0FB8; ; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ ++B; SS.𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ ++B; Ss.𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ ++B; xn--zca.xn--lgd921mvv0m; ß.𐋳ⴌ\u0FB8; xn--zca.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ ++T; ß.𐋳ⴌ\u0FB8; ; ss.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ ++N; ß.𐋳ⴌ\u0FB8; ; xn--zca.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ ++B; xn--zca.xn--lgd10cu829c; [V6]; [V6] # ß.𐋳Ⴌྸ ++T; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ ++N; ß。𐋳ⴌ\u0FB8; ß.𐋳ⴌ\u0FB8; xn--zca.xn--lgd921mvv0m; NV8 # ß.𐋳ⴌྸ ++B; SS。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ ++B; ss。𐋳ⴌ\u0FB8; ss.𐋳ⴌ\u0FB8; ss.xn--lgd921mvv0m; NV8 # ss.𐋳ⴌྸ ++B; Ss。𐋳Ⴌ\u0FB8; [P1 V6]; [P1 V6] # ss.𐋳Ⴌྸ ++T; -\u069E𐶡.\u200C⾝\u09CD; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ڞ.身্ ++N; -\u069E𐶡.\u200C⾝\u09CD; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] 
# -ڞ.身্ ++T; -\u069E𐶡.\u200C身\u09CD; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ڞ.身্ ++N; -\u069E𐶡.\u200C身\u09CD; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ڞ.身্ ++B; xn----stc7013r.xn--b7b1419d; [B1 V3 V6]; [B1 V3 V6] # -ڞ.身্ ++B; xn----stc7013r.xn--b7b305imj2f; [B1 C1 V3 V6]; [B1 C1 V3 V6] # -ڞ.身্ ++T; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1] # 😮ݤ𑈵𞀖.💅 ++N; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1 C2] # 😮ݤ𑈵𞀖.💅 ++T; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1] # 😮ݤ𑈵𞀖.💅 ++N; 😮\u0764𑈵𞀖.💅\u200D; [B1 C2]; [B1 C2] # 😮ݤ𑈵𞀖.💅 ++B; xn--opb4277kuc7elqsa.xn--kr8h; [B1]; [B1] # 😮ݤ𑈵𞀖.💅 ++B; xn--opb4277kuc7elqsa.xn--1ug5265p; [B1 C2]; [B1 C2] # 😮ݤ𑈵𞀖.💅 ++T; \u08F2\u200D꙳\u0712.ᢏ\u200C󠍄; [B1 B6 C1 C2 P1 V5 V6]; [B1 B6 P1 V5 V6] # ࣲ꙳ܒ.ᢏ ++N; \u08F2\u200D꙳\u0712.ᢏ\u200C󠍄; [B1 B6 C1 C2 P1 V5 V6]; [B1 B6 C1 C2 P1 V5 V6] # ࣲ꙳ܒ.ᢏ ++B; xn--cnb37gdy00a.xn--89e02253p; [B1 B6 V5 V6]; [B1 B6 V5 V6] # ࣲ꙳ܒ.ᢏ ++B; xn--cnb37g904be26j.xn--89e849ax9363a; [B1 B6 C1 C2 V5 V6]; [B1 B6 C1 C2 V5 V6] # ࣲ꙳ܒ.ᢏ ++B; Ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # Ⴑ.ڿᠲ ++B; Ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # Ⴑ.ڿᠲ ++B; ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⴑ.ڿᠲ ++B; xn--8kj.xn--ykb840gd555a; [B2 B3 V6]; [B2 B3 V6] # ⴑ.ڿᠲ ++B; xn--pnd.xn--ykb840gd555a; [B2 B3 V6]; [B2 B3 V6] # Ⴑ.ڿᠲ ++B; ⴑ.\u06BF𞯓ᠲ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ⴑ.ڿᠲ ++B; \u1A5A𛦝\u0C4D。𚝬𝟵; [P1 V5 V6]; [P1 V5 V6] # ᩚ్.9 ++B; \u1A5A𛦝\u0C4D。𚝬9; [P1 V5 V6]; [P1 V5 V6] # ᩚ్.9 ++B; xn--lqc703ebm93a.xn--9-000p; [V5 V6]; [V5 V6] # ᩚ్.9 ++T; \u200C\u06A0𿺆𝟗。Ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.Ⴣ꒘مخ ++N; \u200C\u06A0𿺆𝟗。Ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.Ⴣ꒘مخ ++T; \u200C\u06A0𿺆9。Ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.Ⴣ꒘مخ ++N; \u200C\u06A0𿺆9。Ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.Ⴣ꒘مخ ++T; \u200C\u06A0𿺆9。ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.ⴣ꒘مخ ++N; \u200C\u06A0𿺆9。ⴣ꒘\u0645\u062E񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.ⴣ꒘مخ ++B; xn--9-vtc42319e.xn--tgb9bz87p833hw316c; [B2 B5 V6]; [B2 B5 V6] # ڠ9.ⴣ꒘مخ ++B; 
xn--9-vtc736qts91g.xn--tgb9bz87p833hw316c; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ڠ9.ⴣ꒘مخ ++B; xn--9-vtc42319e.xn--tgb9bz61cfn8mw3t2c; [B2 B5 V6]; [B2 B5 V6] # ڠ9.Ⴣ꒘مخ ++B; xn--9-vtc736qts91g.xn--tgb9bz61cfn8mw3t2c; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ڠ9.Ⴣ꒘مخ ++T; \u200C\u06A0𿺆𝟗。ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B2 B5 P1 V6] # ڠ9.ⴣ꒘مخ ++N; \u200C\u06A0𿺆𝟗。ⴣ꒘\uFCD0񐘖; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ڠ9.ⴣ꒘مخ ++B; ᡖ。\u031F񗛨\u0B82-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ᡖ.̟ஂ- ++B; ᡖ。\u031F񗛨\u0B82-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ᡖ.̟ஂ- ++B; xn--m8e.xn----mdb555dkk71m; [V3 V5 V6]; [V3 V5 V6] # ᡖ.̟ஂ- ++B; 𞠠浘。絧𞀀; [B2 B3]; [B2 B3] ++B; xn--e0wp491f.xn--ud0a3573e; [B2 B3]; [B2 B3] ++B; \u0596Ⴋ.𝟳≯︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯︒ ++B; \u0596Ⴋ.𝟳>\u0338︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯︒ ++B; \u0596Ⴋ.7≯。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯. ++B; \u0596Ⴋ.7>\u0338。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖Ⴋ.7≯. ++B; \u0596ⴋ.7>\u0338。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯. ++B; \u0596ⴋ.7≯。\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯. ++B; xn--hcb613r.xn--7-pgo.; [V5 V6]; [V5 V6] # ֖ⴋ.7≯. ++B; xn--hcb887c.xn--7-pgo.; [V5 V6]; [V5 V6] # ֖Ⴋ.7≯. 
++B; \u0596ⴋ.𝟳>\u0338︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯︒ ++B; \u0596ⴋ.𝟳≯︒\uFE0A; [P1 V5 V6]; [P1 V5 V6] # ֖ⴋ.7≯︒ ++B; xn--hcb613r.xn--7-pgoy530h; [V5 V6]; [V5 V6] # ֖ⴋ.7≯︒ ++B; xn--hcb887c.xn--7-pgoy530h; [V5 V6]; [V5 V6] # ֖Ⴋ.7≯︒ ++T; \u200DF𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂.︒ݾ𐹢 ++N; \u200DF𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂.︒ݾ𐹢 ++T; \u200DF𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂..ݾ𐹢 ++N; \u200DF𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂..ݾ𐹢 ++T; \u200Df𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂..ݾ𐹢 ++N; \u200Df𑓂。󠺨。\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂..ݾ𐹢 ++B; xn--f-kq9i.xn--7656e.xn--fqb4175k; [B1 V6]; [B1 V6] # f𑓂..ݾ𐹢 ++B; xn--f-tgn9761i.xn--7656e.xn--fqb4175k; [B1 C2 V6]; [B1 C2 V6] # f𑓂..ݾ𐹢 ++T; \u200Df𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 P1 V6] # f𑓂.︒ݾ𐹢 ++N; \u200Df𑓂。󠺨︒\u077E𐹢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # f𑓂.︒ݾ𐹢 ++B; xn--f-kq9i.xn--fqb1637j8hky9452a; [B1 V6]; [B1 V6] # f𑓂.︒ݾ𐹢 ++B; xn--f-tgn9761i.xn--fqb1637j8hky9452a; [B1 C2 V6]; [B1 C2 V6] # f𑓂.︒ݾ𐹢 ++B; \u0845🄇𐼗︒。𐹻𑜫; [B1 B3 P1 V6]; [B1 B3 P1 V6] # ࡅ🄇︒.𐹻𑜫 ++B; \u08456,𐼗。。𐹻𑜫; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡅ6,..𐹻𑜫 ++B; xn--6,-r4e4420y..xn--zo0di2m; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡅ6,..𐹻𑜫 ++B; xn--3vb4696jpxkjh7s.xn--zo0di2m; [B1 B3 V6]; [B1 B3 V6] # ࡅ🄇︒.𐹻𑜫 ++B; 𐹈.\u1DC0𑈱𐦭; [B1 P1 V5 V6]; [B1 P1 V5 V6] # .᷀𑈱𐦭 ++B; xn--jn0d.xn--7dg0871h3lf; [B1 V5 V6]; [B1 V5 V6] # .᷀𑈱𐦭 ++B; Ⴂ䠺。𞤃񅏎󙮦\u0693; [B2 P1 V6]; [B2 P1 V6] # Ⴂ䠺.𞤥ړ ++B; ⴂ䠺。𞤥񅏎󙮦\u0693; [B2 P1 V6]; [B2 P1 V6] # ⴂ䠺.𞤥ړ ++B; xn--tkj638f.xn--pjb9818vg4xno967d; [B2 V6]; [B2 V6] # ⴂ䠺.𞤥ړ ++B; xn--9md875z.xn--pjb9818vg4xno967d; [B2 V6]; [B2 V6] # Ⴂ䠺.𞤥ړ ++B; ⴂ䠺。𞤃񅏎󙮦\u0693; [B2 P1 V6]; [B2 P1 V6] # ⴂ䠺.𞤥ړ ++B; 🄇伐︒.𜙚\uA8C4; [P1 V6]; [P1 V6] # 🄇伐︒.꣄ ++B; 6,伐。.𜙚\uA8C4; [P1 V6 A4_2]; [P1 V6 A4_2] # 6,伐..꣄ ++B; xn--6,-7i3c..xn--0f9ao925c; [P1 V6 A4_2]; [P1 V6 A4_2] # 6,伐..꣄ ++B; xn--woqs083bel0g.xn--0f9ao925c; [V6]; [V6] # 🄇伐︒.꣄ ++T; \u200D𐹠\uABED\uFFFB。\u200D𐫓Ⴚ𑂹; [B1 C2 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠꯭.𐫓Ⴚ𑂹 ++N; 
\u200D𐹠\uABED\uFFFB。\u200D𐫓Ⴚ𑂹; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹠꯭.𐫓Ⴚ𑂹 ++T; \u200D𐹠\uABED\uFFFB。\u200D𐫓ⴚ𑂹; [B1 C2 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠꯭.𐫓ⴚ𑂹 ++N; \u200D𐹠\uABED\uFFFB。\u200D𐫓ⴚ𑂹; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹠꯭.𐫓ⴚ𑂹 ++B; xn--429az70n29i.xn--ilj7702eqyd; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐹠꯭.𐫓ⴚ𑂹 ++B; xn--1ugz126coy7bdbm.xn--1ug062chv7ov6e; [B1 C2 V6]; [B1 C2 V6] # 𐹠꯭.𐫓ⴚ𑂹 ++B; xn--429az70n29i.xn--ynd3619jqyd; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐹠꯭.𐫓Ⴚ𑂹 ++B; xn--1ugz126coy7bdbm.xn--ynd959evs1pv6e; [B1 C2 V6]; [B1 C2 V6] # 𐹠꯭.𐫓Ⴚ𑂹 ++B; 󠆠.񷐴󌟈; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; 󠆠.񷐴󌟈; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; .xn--rx21bhv12i; [V6 A4_2]; [V6 A4_2] ++T; 𐫃\u200CႦ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃Ⴆ.≠ ++N; 𐫃\u200CႦ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃Ⴆ.≠ ++T; 𐫃\u200CႦ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃Ⴆ.≠ ++N; 𐫃\u200CႦ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃Ⴆ.≠ ++T; 𐫃\u200Cⴆ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃ⴆ.≠ ++N; 𐫃\u200Cⴆ.=\u0338𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃ⴆ.≠ ++T; 𐫃\u200Cⴆ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐫃ⴆ.≠ ++N; 𐫃\u200Cⴆ.≠𞷙; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 𐫃ⴆ.≠ ++B; xn--xkjz802e.xn--1ch2802p; [B1 B2 B3 V6]; [B1 B2 B3 V6] ++B; xn--0ug132csv7o.xn--1ch2802p; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 𐫃ⴆ.≠ ++B; xn--end1719j.xn--1ch2802p; [B1 B2 B3 V6]; [B1 B2 B3 V6] ++B; xn--end799ekr1p.xn--1ch2802p; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 𐫃Ⴆ.≠ ++B; 󠁲𙩢𝟥ꘌ.\u0841; [B1 P1 V6]; [B1 P1 V6] # 3ꘌ.ࡁ ++B; 󠁲𙩢3ꘌ.\u0841; [B1 P1 V6]; [B1 P1 V6] # 3ꘌ.ࡁ ++B; xn--3-0g3es485d8i15h.xn--zvb; [B1 V6]; [B1 V6] # 3ꘌ.ࡁ ++B; -.\u1886󡲣-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -.ᢆ- ++B; -.xn----pbkx6497q; [V3 V5 V6]; [V3 V5 V6] # -.ᢆ- ++T; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .ς ++N; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .ς ++T; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .ς ++N; 󲚗\u200C。\u200C𞰆ς; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .ς ++T; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 
C1 P1 V6]; [B2 B3 P1 V6] # .σ ++N; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ ++T; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .σ ++N; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ ++B; xn--qp42f.xn--4xa3011w; [B2 B3 V6]; [B2 B3 V6] ++B; xn--0ug76062m.xn--4xa595lhn92a; [B1 B6 C1 V6]; [B1 B6 C1 V6] # .σ ++B; xn--0ug76062m.xn--3xa795lhn92a; [B1 B6 C1 V6]; [B1 B6 C1 V6] # .ς ++T; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .σ ++N; 󲚗\u200C。\u200C𞰆Σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ ++T; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B2 B3 P1 V6] # .σ ++N; 󲚗\u200C。\u200C𞰆σ; [B1 B6 C1 P1 V6]; [B1 B6 C1 P1 V6] # .σ ++T; 堕𑓂\u1B02。𐮇𞤽\u200C-; [B3 C1 V3]; [B3 V3] # 堕𑓂ᬂ.𐮇𞤽- ++N; 堕𑓂\u1B02。𐮇𞤽\u200C-; [B3 C1 V3]; [B3 C1 V3] # 堕𑓂ᬂ.𐮇𞤽- ++T; 堕𑓂\u1B02。𐮇𞤛\u200C-; [B3 C1 V3]; [B3 V3] # 堕𑓂ᬂ.𐮇𞤽- ++N; 堕𑓂\u1B02。𐮇𞤛\u200C-; [B3 C1 V3]; [B3 C1 V3] # 堕𑓂ᬂ.𐮇𞤽- ++B; xn--5sf345zdk8h.xn----iv5iw606c; [B3 V3]; [B3 V3] # 堕𑓂ᬂ.𐮇𞤽- ++B; xn--5sf345zdk8h.xn----rgnt157hwl9g; [B3 C1 V3]; [B3 C1 V3] # 堕𑓂ᬂ.𐮇𞤽- ++T; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς ++N; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς ++T; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς ++N; 𐹶𑁆ᡕ𞤢。ᡥς\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς ++B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ ++B; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ ++B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ ++B; xn--l8e1317j1ebz456b.xn--4xaa85plx4a; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ ++T; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++N; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++T; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++N; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++B; xn--l8e1317j1ebz456b.xn--3xab95plx4a; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++B; xn--l8e1317j1ebz456b.xn--3xaa16plx4a; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥςتς ++B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ ++B; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ ++B; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ ++T; 𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++N; 
𐹶𑁆ᡕ𞤀。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++T; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++N; 𐹶𑁆ᡕ𞤢。ᡥσ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ ++B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ ++T; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++N; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062AΣ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ ++B; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aσ; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتσ ++T; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++N; 𐹶𑁆ᡕ𞤢。ᡥΣ\u062Aς; [B1 B5]; [B1 B5] # 𐹶𑁆ᡕ𞤢.ᡥσتς ++T; 󏒰.-𝟻ß; [P1 V3 V6]; [P1 V3 V6] ++N; 󏒰.-𝟻ß; [P1 V3 V6]; [P1 V3 V6] ++T; 󏒰.-5ß; [P1 V3 V6]; [P1 V3 V6] ++N; 󏒰.-5ß; [P1 V3 V6]; [P1 V3 V6] ++B; 󏒰.-5SS; [P1 V3 V6]; [P1 V3 V6] ++B; 󏒰.-5ss; [P1 V3 V6]; [P1 V3 V6] ++B; 󏒰.-5Ss; [P1 V3 V6]; [P1 V3 V6] ++B; xn--t960e.-5ss; [V3 V6]; [V3 V6] ++B; xn--t960e.xn---5-hia; [V3 V6]; [V3 V6] ++B; 󏒰.-𝟻SS; [P1 V3 V6]; [P1 V3 V6] ++B; 󏒰.-𝟻ss; [P1 V3 V6]; [P1 V3 V6] ++B; 󏒰.-𝟻Ss; [P1 V3 V6]; [P1 V3 V6] ++T; \u200D𐨿.🤒Ⴥ򑮶; [C2 P1 V6]; [P1 V5 V6] # 𐨿.🤒Ⴥ ++N; \u200D𐨿.🤒Ⴥ򑮶; [C2 P1 V6]; [C2 P1 V6] # 𐨿.🤒Ⴥ ++T; \u200D𐨿.🤒ⴥ򑮶; [C2 P1 V6]; [P1 V5 V6] # 𐨿.🤒ⴥ ++N; \u200D𐨿.🤒ⴥ򑮶; [C2 P1 V6]; [C2 P1 V6] # 𐨿.🤒ⴥ ++B; xn--0s9c.xn--tljz038l0gz4b; [V5 V6]; [V5 V6] ++B; xn--1ug9533g.xn--tljz038l0gz4b; [C2 V6]; [C2 V6] # 𐨿.🤒ⴥ ++B; xn--0s9c.xn--9nd3211w0gz4b; [V5 V6]; [V5 V6] ++B; xn--1ug9533g.xn--9nd3211w0gz4b; [C2 V6]; [C2 V6] # 𐨿.🤒Ⴥ ++T; 𵋅。ß𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ß𬵩 ++N; 𵋅。ß𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ß𬵩 ++T; 𵋅。SS𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ss𬵩 ++N; 𵋅。SS𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss𬵩 ++T; 𵋅。ss𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ss𬵩 ++N; 𵋅。ss𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss𬵩 ++T; 𵋅。Ss𬵩\u200D; [C2 P1 V6]; [P1 V6] # .ss𬵩 ++N; 𵋅。Ss𬵩\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss𬵩 ++B; xn--ey1p.xn--ss-eq36b; [V6]; [V6] ++B; xn--ey1p.xn--ss-n1tx0508a; [C2 V6]; [C2 V6] # .ss𬵩 ++B; xn--ey1p.xn--zca870nz438b; [C2 V6]; [C2 V6] # .ß𬵩 ++T; \u200C𭉝。\u07F1\u0301𞹻; [B1 C1 V5]; [B1 V5] # 𭉝.߱́غ ++N; \u200C𭉝。\u07F1\u0301𞹻; 
[B1 C1 V5]; [B1 C1 V5] # 𭉝.߱́غ ++T; \u200C𭉝。\u07F1\u0301\u063A; [B1 C1 V5]; [B1 V5] # 𭉝.߱́غ ++N; \u200C𭉝。\u07F1\u0301\u063A; [B1 C1 V5]; [B1 C1 V5] # 𭉝.߱́غ ++B; xn--634m.xn--lsa46nuub; [B1 V5]; [B1 V5] # 𭉝.߱́غ ++B; xn--0ugy003y.xn--lsa46nuub; [B1 C1 V5]; [B1 C1 V5] # 𭉝.߱́غ ++T; 𞼌\u200C𑈶。𐹡; [B1 B3 C1 P1 V6]; [B1 P1 V6] # 𑈶.𐹡 ++N; 𞼌\u200C𑈶。𐹡; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # 𑈶.𐹡 ++B; xn--9g1d1288a.xn--8n0d; [B1 V6]; [B1 V6] ++B; xn--0ug7946gzpxf.xn--8n0d; [B1 B3 C1 V6]; [B1 B3 C1 V6] # 𑈶.𐹡 ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻ς≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBς=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻ς≠ ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ ++B; xn--zb9h5968x.xn--4xa378i1mfjw7y; [V5 V6]; [V5 V6] # 🜭.𑖿᪻σ≠ ++B; xn--0ug3766p5nm1b.xn--4xa378i1mfjw7y; [C1 V5 V6]; [C1 V5 V6] # 🜭.𑖿᪻σ≠ ++B; xn--0ug3766p5nm1b.xn--3xa578i1mfjw7y; [C1 V5 V6]; [C1 V5 V6] # 🜭.𑖿᪻ς≠ ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBΣ≠; [C1 P1 V5 V6]; 
[C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ≠; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ ++T; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [P1 V5 V6] # 🜭.𑖿᪻σ≠ ++N; 󠅯򇽭\u200C🜭。𑖿\u1ABBσ=\u0338; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 🜭.𑖿᪻σ≠ ++T; ⒋。⒈\u200D򳴢; [C2 P1 V6]; [P1 V6] # ⒋.⒈ ++N; ⒋。⒈\u200D򳴢; [C2 P1 V6]; [C2 P1 V6] # ⒋.⒈ ++T; 4.。1.\u200D򳴢; [C2 P1 V6 A4_2]; [P1 V6 A4_2] # 4..1. ++N; 4.。1.\u200D򳴢; [C2 P1 V6 A4_2]; [C2 P1 V6 A4_2] # 4..1. ++B; 4..1.xn--sf51d; [V6 A4_2]; [V6 A4_2] ++B; 4..1.xn--1ug64613i; [C2 V6 A4_2]; [C2 V6 A4_2] # 4..1. ++B; xn--wsh.xn--tsh07994h; [V6]; [V6] ++B; xn--wsh.xn--1ug58o74922a; [C2 V6]; [C2 V6] # ⒋.⒈ ++T; \u0644ß。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 ++N; \u0644ß。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 ++T; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 ++N; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 ++T; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 ++N; \u0644ß。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لß.᩠𐇽𞤾 ++B; \u0644SS。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644Ss。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; xn--ss-svd.xn--jof2298hn83fln78f; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # لss.᩠𐇽𞤾 ++B; xn--zca57y.xn--jof2298hn83fln78f; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # لß.᩠𐇽𞤾 ++B; \u0644SS。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644Ss。\u1A60𐇽򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644SS。𐇽\u1A60򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644ss。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644Ss。𐇽\u1A60򾅢𞤜; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; 
\u0644SS。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644Ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644SS。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644Ss。\u1A60𐇽򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644SS。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; \u0644Ss。𐇽\u1A60򾅢𞤾; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # لss.᩠𐇽𞤾 ++B; 𐹽𑄳񼜲.\u1DDF\u17B8\uA806𑜫; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𐹽𑄳.ᷟី꠆𑜫 ++B; xn--1o0di0c0652w.xn--33e362arr1l153d; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𐹽𑄳.ᷟី꠆𑜫 ++T; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 ++N; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 ++T; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 ++N; Ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # Ⴓ𑜫.ڧ𑰶 ++T; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 ++N; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 ++B; xn--blj6306ey091d.xn--9jb4223l; [V6]; [V6] # ⴓ𑜫.ڧ𑰶 ++B; xn--1ugy52cym7p7xu5e.xn--9jb4223l; [V6]; [V6] # ⴓ𑜫.ڧ𑰶 ++B; xn--rnd8945ky009c.xn--9jb4223l; [V6]; [V6] # Ⴓ𑜫.ڧ𑰶 ++B; xn--rnd479ep20q7x12e.xn--9jb4223l; [V6]; [V6] # Ⴓ𑜫.ڧ𑰶 ++T; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 ++N; ⴓ𑜫\u200D򗭓.\u06A7𑰶; [P1 V6]; [P1 V6] # ⴓ𑜫.ڧ𑰶 ++B; 𐨿.🄆—; [P1 V5 V6]; [P1 V5 V6] ++B; 𐨿.5,—; [P1 V5 V6]; [P1 V5 V6] ++B; xn--0s9c.xn--5,-81t; [P1 V5 V6]; [P1 V5 V6] ++B; xn--0s9c.xn--8ug8324p; [V5 V6]; [V5 V6] ++B; 򔊱񁦮۸。󠾭-; [P1 V3 V6]; [P1 V3 V6] ++B; xn--lmb18944c0g2z.xn----2k81m; [V3 V6]; [V3 V6] ++B; 𼗸\u07CD𐹮。\u06DDᡎᠴ; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ߍ𐹮.ᡎᠴ ++B; xn--osb0855kcc2r.xn--tlb299fhc; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ߍ𐹮.ᡎᠴ ++T; \u200DᠮႾ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 P1 V6] # ᠮႾ🄂.🚗ࡁ ++N; \u200DᠮႾ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮႾ🄂.🚗ࡁ ++T; \u200DᠮႾ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 B6 P1 V6] # ᠮႾ1,.🚗ࡁ ++N; \u200DᠮႾ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮႾ1,.🚗ࡁ ++T; \u200Dᠮⴞ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 B6 P1 V6] # 
ᠮⴞ1,.🚗ࡁ ++N; \u200Dᠮⴞ1,.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮⴞ1,.🚗ࡁ ++B; xn--1,-v3o625k.xn--zvb3124wpkpf; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᠮⴞ1,.🚗ࡁ ++B; xn--1,-v3o161c53q.xn--zvb692j9664aic1g; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮⴞ1,.🚗ࡁ ++B; xn--1,-ogkx89c.xn--zvb3124wpkpf; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᠮႾ1,.🚗ࡁ ++B; xn--1,-ogkx89c39j.xn--zvb692j9664aic1g; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮႾ1,.🚗ࡁ ++T; \u200Dᠮⴞ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 P1 V6] # ᠮⴞ🄂.🚗ࡁ ++N; \u200Dᠮⴞ🄂.🚗\u0841𮹌\u200C; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ᠮⴞ🄂.🚗ࡁ ++B; xn--h7e438h1p44a.xn--zvb3124wpkpf; [B1 V6]; [B1 V6] # ᠮⴞ🄂.🚗ࡁ ++B; xn--h7e341b0wlbv45b.xn--zvb692j9664aic1g; [B1 C1 C2 V6]; [B1 C1 C2 V6] # ᠮⴞ🄂.🚗ࡁ ++B; xn--2nd129ai554b.xn--zvb3124wpkpf; [B1 V6]; [B1 V6] # ᠮႾ🄂.🚗ࡁ ++B; xn--2nd129ay2gnw71c.xn--zvb692j9664aic1g; [B1 C1 C2 V6]; [B1 C1 C2 V6] # ᠮႾ🄂.🚗ࡁ ++B; \u0601\u0697.𑚶񼡷⾆; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ڗ.𑚶舌 ++B; \u0601\u0697.𑚶񼡷舌; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ڗ.𑚶舌 ++B; xn--jfb41a.xn--tc1ap851axo39c; [B1 V5 V6]; [B1 V5 V6] # ڗ.𑚶舌 ++B; 🞅󠳡󜍙.񲖷; [P1 V6]; [P1 V6] ++B; xn--ie9hi1349bqdlb.xn--oj69a; [V6]; [V6] ++T; \u20E7񯡎-򫣝.4Ⴄ\u200C; [C1 P1 V5 V6]; [P1 V5 V6] # ⃧-.4Ⴄ ++N; \u20E7񯡎-򫣝.4Ⴄ\u200C; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⃧-.4Ⴄ ++T; \u20E7񯡎-򫣝.4ⴄ\u200C; [C1 P1 V5 V6]; [P1 V5 V6] # ⃧-.4ⴄ ++N; \u20E7񯡎-򫣝.4ⴄ\u200C; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⃧-.4ⴄ ++B; xn----9snu5320fi76w.xn--4-ivs; [V5 V6]; [V5 V6] # ⃧-.4ⴄ ++B; xn----9snu5320fi76w.xn--4-sgn589c; [C1 V5 V6]; [C1 V5 V6] # ⃧-.4ⴄ ++B; xn----9snu5320fi76w.xn--4-f0g; [V5 V6]; [V5 V6] # ⃧-.4Ⴄ ++B; xn----9snu5320fi76w.xn--4-f0g649i; [C1 V5 V6]; [C1 V5 V6] # ⃧-.4Ⴄ ++T; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 ++N; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--zca4946pblnc; NV8 ++T; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 ++N; ᚭ。𝌠ß𖫱; ᚭ.𝌠ß𖫱; xn--hwe.xn--zca4946pblnc; NV8 ++B; ᚭ。𝌠SS𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 ++B; ᚭ。𝌠ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 ++B; ᚭ。𝌠Ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 ++B; 
xn--hwe.xn--ss-ci1ub261a; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 ++B; ᚭ.𝌠ss𖫱; ; xn--hwe.xn--ss-ci1ub261a; NV8 ++B; ᚭ.𝌠SS𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 ++B; ᚭ.𝌠Ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 ++B; xn--hwe.xn--zca4946pblnc; ᚭ.𝌠ß𖫱; xn--hwe.xn--zca4946pblnc; NV8 ++T; ᚭ.𝌠ß𖫱; ; xn--hwe.xn--ss-ci1ub261a; NV8 ++N; ᚭ.𝌠ß𖫱; ; xn--hwe.xn--zca4946pblnc; NV8 ++B; ᚭ。𝌠SS𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 ++B; ᚭ。𝌠ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 ++B; ᚭ。𝌠Ss𖫱; ᚭ.𝌠ss𖫱; xn--hwe.xn--ss-ci1ub261a; NV8 ++B; ₁。𞤫ꡪ; [B1 B2 B3]; [B1 B2 B3] ++B; 1。𞤫ꡪ; [B1 B2 B3]; [B1 B2 B3] ++B; 1。𞤉ꡪ; [B1 B2 B3]; [B1 B2 B3] ++B; 1.xn--gd9al691d; [B1 B2 B3]; [B1 B2 B3] ++B; ₁。𞤉ꡪ; [B1 B2 B3]; [B1 B2 B3] ++T; 𯻼\u200C.𞶞򻙤񥘇; [B2 B3 B6 C1 P1 V6]; [B2 B3 P1 V6] # . ++N; 𯻼\u200C.𞶞򻙤񥘇; [B2 B3 B6 C1 P1 V6]; [B2 B3 B6 C1 P1 V6] # . ++B; xn--kg4n.xn--2b7hs861pl540a; [B2 B3 V6]; [B2 B3 V6] ++B; xn--0ug27500a.xn--2b7hs861pl540a; [B2 B3 B6 C1 V6]; [B2 B3 B6 C1 V6] # . ++B; 𑑄≯。𑜤; [P1 V5 V6]; [P1 V5 V6] ++B; 𑑄>\u0338。𑜤; [P1 V5 V6]; [P1 V5 V6] ++B; 𑑄≯。𑜤; [P1 V5 V6]; [P1 V5 V6] ++B; 𑑄>\u0338。𑜤; [P1 V5 V6]; [P1 V5 V6] ++B; xn--hdh5636g.xn--ci2d; [V5 V6]; [V5 V6] ++T; Ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # Ⴋ≮.ާ𐋣 ++N; Ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # Ⴋ≮.ާ𐋣 ++T; Ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # Ⴋ≮.ާ𐋣 ++N; Ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # Ⴋ≮.ާ𐋣 ++T; ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # ⴋ≮.ާ𐋣 ++N; ⴋ<\u0338𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # ⴋ≮.ާ𐋣 ++T; ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [P1 V5 V6] # ⴋ≮.ާ𐋣 ++N; ⴋ≮𱲆。\u200D\u07A7𐋣; [C2 P1 V6]; [C2 P1 V6] # ⴋ≮.ާ𐋣 ++B; xn--gdhz03bxt42d.xn--lrb6479j; [V5 V6]; [V5 V6] # ⴋ≮.ާ𐋣 ++B; xn--gdhz03bxt42d.xn--lrb506jqr4n; [C2 V6]; [C2 V6] # ⴋ≮.ާ𐋣 ++B; xn--jnd802gsm17c.xn--lrb6479j; [V5 V6]; [V5 V6] # Ⴋ≮.ާ𐋣 ++B; xn--jnd802gsm17c.xn--lrb506jqr4n; [C2 V6]; [C2 V6] # Ⴋ≮.ާ𐋣 ++B; \u17D2.򆽒≯; [P1 V5 V6]; [P1 V5 V6] # ្.≯ ++B; \u17D2.򆽒>\u0338; [P1 V5 V6]; [P1 V5 V6] # ្.≯ ++B; xn--u4e.xn--hdhx0084f; [V5 V6]; [V5 
V6] # ្.≯ ++B; 񏁇\u1734.𐨺É⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 ++B; 񏁇\u1734.𐨺E\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 ++B; 񏁇\u1734.𐨺É⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 ++B; 񏁇\u1734.𐨺E\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 ++B; 񏁇\u1734.𐨺e\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 ++B; 񏁇\u1734.𐨺é⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 ++B; xn--c0e34564d.xn--9ca207st53lg3f; [V5 V6]; [V5 V6] # ᜴.𐨺é⬓𑄴 ++B; 񏁇\u1734.𐨺e\u0301⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 ++B; 񏁇\u1734.𐨺é⬓𑄴; [P1 V5 V6]; [P1 V5 V6] # ᜴.𐨺é⬓𑄴 ++T; ᢇ\u200D\uA8C4。︒𞤺; [B1 B6 C2 P1 V6]; [B1 P1 V6] # ᢇ꣄.︒𞤺 ++N; ᢇ\u200D\uA8C4。︒𞤺; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ᢇ꣄.︒𞤺 ++T; ᢇ\u200D\uA8C4。。𞤺; [B6 C2 A4_2]; [A4_2] # ᢇ꣄..𞤺 ++N; ᢇ\u200D\uA8C4。。𞤺; [B6 C2 A4_2]; [B6 C2 A4_2] # ᢇ꣄..𞤺 ++T; ᢇ\u200D\uA8C4。。𞤘; [B6 C2 A4_2]; [A4_2] # ᢇ꣄..𞤺 ++N; ᢇ\u200D\uA8C4。。𞤘; [B6 C2 A4_2]; [B6 C2 A4_2] # ᢇ꣄..𞤺 ++B; xn--09e4694e..xn--ye6h; [A4_2]; [A4_2] # ᢇ꣄..𞤺 ++B; xn--09e669a6x8j..xn--ye6h; [B6 C2 A4_2]; [B6 C2 A4_2] # ᢇ꣄..𞤺 ++T; ᢇ\u200D\uA8C4。︒𞤘; [B1 B6 C2 P1 V6]; [B1 P1 V6] # ᢇ꣄.︒𞤺 ++N; ᢇ\u200D\uA8C4。︒𞤘; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ᢇ꣄.︒𞤺 ++B; xn--09e4694e.xn--y86cv562b; [B1 V6]; [B1 V6] # ᢇ꣄.︒𞤺 ++B; xn--09e669a6x8j.xn--y86cv562b; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ᢇ꣄.︒𞤺 ++T; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ ++N; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ ++T; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ ++N; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ ++T; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ ++N; 𞩬򖙱\u1714\u200C。\u0631\u07AA≮; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ ++T; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ ++N; 𞩬򖙱\u1714\u200C。\u0631\u07AA<\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ᜔.رު≮ ++B; xn--fze3930v7hz6b.xn--wgb86el10d; [B2 B3 V6]; [B2 B3 V6] # ᜔.رު≮ ++B; xn--fze607b9651bjwl7c.xn--wgb86el10d; [B2 B3 V6]; [B2 B3 V6] # ᜔.رު≮ ++B; Ⴣ.\u0653ᢤ; [P1 V5 
V6]; [P1 V5 V6] # Ⴣ.ٓᢤ ++B; Ⴣ.\u0653ᢤ; [P1 V5 V6]; [P1 V5 V6] # Ⴣ.ٓᢤ ++B; ⴣ.\u0653ᢤ; [V5]; [V5] # ⴣ.ٓᢤ ++B; xn--rlj.xn--vhb294g; [V5]; [V5] # ⴣ.ٓᢤ ++B; xn--7nd.xn--vhb294g; [V5 V6]; [V5 V6] # Ⴣ.ٓᢤ ++B; ⴣ.\u0653ᢤ; [V5]; [V5] # ⴣ.ٓᢤ ++B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ ++B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ ++B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ ++B; 󠄈\u0813.싉򄆻Ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉Ⴤ ++B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ ++B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ ++B; xn--oub.xn--sljz109bpe25dviva; [V6]; [V6] # ࠓ.싉ⴤ ++B; xn--oub.xn--8nd9522gpe69cviva; [V6]; [V6] # ࠓ.싉Ⴤ ++B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ ++B; 󠄈\u0813.싉򄆻ⴤ򂡐; [P1 V6]; [P1 V6] # ࠓ.싉ⴤ ++B; \uAA2C𑲫≮.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ ++B; \uAA2C𑲫<\u0338.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ ++B; \uAA2C𑲫≮.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ ++B; \uAA2C𑲫<\u0338.⤂; [P1 V5 V6]; [P1 V5 V6] # ꨬ𑲫≮.⤂ ++B; xn--gdh1854cn19c.xn--kqi; [V5 V6]; [V5 V6] # ꨬ𑲫≮.⤂ ++B; \u0604𐩔≮Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ ++B; \u0604𐩔<\u0338Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ ++B; \u0604𐩔≮Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ ++B; \u0604𐩔<\u0338Ⴢ.Ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.Ⴃ ++B; \u0604𐩔<\u0338ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ ++B; \u0604𐩔≮ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ ++B; \u0604𐩔≮Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ ++B; \u0604𐩔<\u0338Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ ++B; xn--mfb416c0jox02t.xn--ukj; [B1 V6]; [B1 V6] # 𐩔≮Ⴢ.ⴃ ++B; xn--mfb266l4khr54u.xn--ukj; [B1 V6]; [B1 V6] # 𐩔≮ⴢ.ⴃ ++B; xn--mfb416c0jox02t.xn--bnd; [B1 V6]; [B1 V6] # 𐩔≮Ⴢ.Ⴃ ++B; \u0604𐩔<\u0338ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ ++B; \u0604𐩔≮ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮ⴢ.ⴃ ++B; \u0604𐩔≮Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ ++B; \u0604𐩔<\u0338Ⴢ.ⴃ; [B1 P1 V6]; [B1 P1 V6] # 𐩔≮Ⴢ.ⴃ ++B; 𑁅。-; [V3 V5]; [V3 V5] ++B; xn--210d.-; [V3 V5]; [V3 V5] ++B; \u0DCA򕸽󠧱。饈≠\u0664; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ්.饈≠٤ ++B; \u0DCA򕸽󠧱。饈=\u0338\u0664; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ්.饈≠٤ ++B; \u0DCA򕸽󠧱。饈≠\u0664; [B1 B5 B6 P1 V5 V6]; 
[B1 B5 B6 P1 V5 V6] # ්.饈≠٤ ++B; \u0DCA򕸽󠧱。饈=\u0338\u0664; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ්.饈≠٤ ++B; xn--h1c25913jfwov.xn--dib144ler5f; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ්.饈≠٤ ++B; 𞥃ᠠ⁷。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] ++B; 𞥃ᠠ⁷。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] ++B; 𞥃ᠠ7。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] ++B; 𞥃ᠠ7。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] ++B; 𞤡ᠠ7。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] ++B; 𞤡ᠠ7。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] ++B; xn--7-v4j2826w.xn--4-ogoy01bou3i; [B1 B2 V6]; [B1 B2 V6] ++B; 𞤡ᠠ⁷。>\u0338邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] ++B; 𞤡ᠠ⁷。≯邅⬻4; [B1 B2 P1 V6]; [B1 B2 P1 V6] ++B; 򠿯ᡳ-𑐻.𐹴𐋫\u0605󑎳; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᡳ-𑐻.𐹴𐋫 ++B; xn----m9j3429kxmy7e.xn--nfb7950kdihrp812a; [B1 B6 V6]; [B1 B6 V6] # ᡳ-𑐻.𐹴𐋫 ++B; 򠶆\u0845\u0A51.넨-󶧈; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡅੑ.넨- ++B; 򠶆\u0845\u0A51.넨-󶧈; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡅੑ.넨- ++B; xn--3vb26hb6834b.xn----i37ez0957g; [B5 B6 V6]; [B5 B6 V6] # ࡅੑ.넨- ++T; ꡦᡑ\u200D⒈。𐋣-; [C2 P1 V3 V6]; [P1 V3 V6] # ꡦᡑ⒈.𐋣- ++N; ꡦᡑ\u200D⒈。𐋣-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ꡦᡑ⒈.𐋣- ++T; ꡦᡑ\u200D1.。𐋣-; [C2 V3 A4_2]; [V3 A4_2] # ꡦᡑ1..𐋣- ++N; ꡦᡑ\u200D1.。𐋣-; [C2 V3 A4_2]; [C2 V3 A4_2] # ꡦᡑ1..𐋣- ++B; xn--1-o7j0610f..xn----381i; [V3 A4_2]; [V3 A4_2] ++B; xn--1-o7j663bdl7m..xn----381i; [C2 V3 A4_2]; [C2 V3 A4_2] # ꡦᡑ1..𐋣- ++B; xn--h8e863drj7h.xn----381i; [V3 V6]; [V3 V6] ++B; xn--h8e470bl0d838o.xn----381i; [C2 V3 V6]; [C2 V3 V6] # ꡦᡑ⒈.𐋣- ++B; Ⴌ。􍼠\uFB69; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴌ.ٹ ++B; Ⴌ。􍼠\u0679; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴌ.ٹ ++B; ⴌ。􍼠\u0679; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴌ.ٹ ++B; xn--3kj.xn--yib19191t; [B5 B6 V6]; [B5 B6 V6] # ⴌ.ٹ ++B; xn--knd.xn--yib19191t; [B5 B6 V6]; [B5 B6 V6] # Ⴌ.ٹ ++B; ⴌ。􍼠\uFB69; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴌ.ٹ ++B; 𐮁𐭱.\u0F84\u135E-\u1CFA; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐮁𐭱.྄፞- ++B; xn--r19c5a.xn----xjg270ag3m; [B1 V5 V6]; [B1 V5 V6] # 𐮁𐭱.྄፞- ++T; ⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [P1 V3 V6] # ⒈䰹-.웈 ++N; ⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ⒈䰹-.웈 ++T; 
⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [P1 V3 V6] # ⒈䰹-.웈 ++N; ⒈䰹\u200D-。웈; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ⒈䰹-.웈 ++T; 1.䰹\u200D-。웈; [C2 V3]; [V3] # 1.䰹-.웈 ++N; 1.䰹\u200D-。웈; [C2 V3]; [C2 V3] # 1.䰹-.웈 ++T; 1.䰹\u200D-。웈; [C2 V3]; [V3] # 1.䰹-.웈 ++N; 1.䰹\u200D-。웈; [C2 V3]; [C2 V3] # 1.䰹-.웈 ++B; 1.xn----zw5a.xn--kp5b; [V3]; [V3] ++B; 1.xn----tgnz80r.xn--kp5b; [C2 V3]; [C2 V3] # 1.䰹-.웈 ++B; xn----dcp160o.xn--kp5b; [V3 V6]; [V3 V6] ++B; xn----tgnx5rjr6c.xn--kp5b; [C2 V3 V6]; [C2 V3 V6] # ⒈䰹-.웈 ++T; て。\u200C󠳽\u07F3; [C1 P1 V6]; [P1 V6] # て.߳ ++N; て。\u200C󠳽\u07F3; [C1 P1 V6]; [C1 P1 V6] # て.߳ ++B; xn--m9j.xn--rtb10784p; [V6]; [V6] # て.߳ ++B; xn--m9j.xn--rtb154j9l73w; [C1 V6]; [C1 V6] # て.߳ ++T; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ ++N; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ ++T; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ ++N; ς。\uA9C0\u06E7; [V5]; [V5] # ς.꧀ۧ ++B; Σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ ++B; σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ ++B; xn--4xa.xn--3lb1944f; [V5]; [V5] # σ.꧀ۧ ++B; xn--3xa.xn--3lb1944f; [V5]; [V5] # ς.꧀ۧ ++B; Σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ ++B; σ。\uA9C0\u06E7; [V5]; [V5] # σ.꧀ۧ ++B; \u0BCD󥫅򌉑.ႢႵ; [P1 V5 V6]; [P1 V5 V6] # ்.ႢႵ ++B; \u0BCD󥫅򌉑.ⴂⴕ; [P1 V5 V6]; [P1 V5 V6] # ்.ⴂⴕ ++B; \u0BCD󥫅򌉑.Ⴂⴕ; [P1 V5 V6]; [P1 V5 V6] # ்.Ⴂⴕ ++B; xn--xmc83135idcxza.xn--9md086l; [V5 V6]; [V5 V6] # ்.Ⴂⴕ ++B; xn--xmc83135idcxza.xn--tkjwb; [V5 V6]; [V5 V6] # ்.ⴂⴕ ++B; xn--xmc83135idcxza.xn--9md2b; [V5 V6]; [V5 V6] # ்.ႢႵ ++T; \u1C32🄈⾛\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ᰲ🄈走֦. ++N; \u1C32🄈⾛\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᰲ🄈走֦. ++T; \u1C327,走\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ᰲ7,走֦. ++N; \u1C327,走\u05A6.\u200D򯥤\u07FD; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᰲ7,走֦. ++B; xn--7,-bid991urn3k.xn--1tb13454l; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ᰲ7,走֦. ++B; xn--7,-bid991urn3k.xn--1tb334j1197q; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᰲ7,走֦. ++B; xn--xcb756i493fwi5o.xn--1tb13454l; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ᰲ🄈走֦. 
++B; xn--xcb756i493fwi5o.xn--1tb334j1197q; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ᰲ🄈走֦. ++B; ᢗ。Ӏ񝄻; [P1 V6]; [P1 V6] ++B; ᢗ。Ӏ񝄻; [P1 V6]; [P1 V6] ++B; ᢗ。ӏ񝄻; [P1 V6]; [P1 V6] ++B; xn--hbf.xn--s5a83117e; [V6]; [V6] ++B; xn--hbf.xn--d5a86117e; [V6]; [V6] ++B; ᢗ。ӏ񝄻; [P1 V6]; [P1 V6] ++B; \u0668-。񠏇🝆ᄾ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٨-.🝆ᄾ ++B; xn----oqc.xn--qrd1699v327w; [B1 V3 V6]; [B1 V3 V6] # ٨-.🝆ᄾ ++B; -𐋷𖾑。󠆬; [V3]; [V3] ++B; xn----991iq40y.; [V3]; [V3] ++T; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹳🐴멈.꯭ ++N; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𐹳🐴멈.꯭ ++T; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹳🐴멈.꯭ ++N; \u200C𐹳🐴멈.\uABED񐡼; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𐹳🐴멈.꯭ ++B; xn--422b325mqb6i.xn--429a8682s; [B1 V5 V6]; [B1 V5 V6] # 𐹳🐴멈.꯭ ++B; xn--0ug6681d406b7bwk.xn--429a8682s; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𐹳🐴멈.꯭ ++B; ≮.\u0769\u0603; [B1 P1 V6]; [B1 P1 V6] # ≮.ݩ ++B; <\u0338.\u0769\u0603; [B1 P1 V6]; [B1 P1 V6] # ≮.ݩ ++B; xn--gdh.xn--lfb92e; [B1 V6]; [B1 V6] # ≮.ݩ ++T; 𐶭⾆。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B2 B3 B5 B6 P1 V5 V6] # 舌.𑚶 ++N; 𐶭⾆。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 舌.𑚶 ++T; 𐶭舌。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B2 B3 B5 B6 P1 V5 V6] # 舌.𑚶 ++N; 𐶭舌。\u200C𑚶򟱃𞰘; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 舌.𑚶 ++B; xn--tc1ao37z.xn--6e2dw557azds2d; [B2 B3 B5 B6 V5 V6]; [B2 B3 B5 B6 V5 V6] ++B; xn--tc1ao37z.xn--0ugx728gi1nfwqz2e; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 舌.𑚶 ++T; \u200CჀ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1ςς ++N; \u200CჀ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1ςς ++T; \u200CჀ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1ςς ++N; \u200CჀ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1ςς ++T; \u200Cⴠ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # ⴠ-.1ςς ++N; \u200Cⴠ-.1ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1ςς ++T; \u200CჀ-.1Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1σσ ++N; \u200CჀ-.1Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1σσ ++T; \u200Cⴠ-.1σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 
V3 V6] # ⴠ-.1σσ ++N; \u200Cⴠ-.1σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1σσ ++B; xn----2ws.xn--1-0mba52321c; [B1 B6 V3 V6]; [B1 B6 V3 V6] ++B; xn----rgn530d.xn--1-0mba52321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # ⴠ-.1σσ ++B; xn----z1g.xn--1-0mba52321c; [B1 B6 V3 V6]; [B1 B6 V3 V6] ++B; xn----z1g168i.xn--1-0mba52321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # Ⴠ-.1σσ ++B; xn----rgn530d.xn--1-ymba92321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # ⴠ-.1ςς ++B; xn----z1g168i.xn--1-ymba92321c; [B1 C1 V3 V6]; [B1 C1 V3 V6] # Ⴠ-.1ςς ++T; \u200Cⴠ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # ⴠ-.1ςς ++N; \u200Cⴠ-.𝟷ς𞴺ς; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1ςς ++T; \u200CჀ-.𝟷Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # Ⴠ-.1σσ ++N; \u200CჀ-.𝟷Σ𞴺Σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴠ-.1σσ ++T; \u200Cⴠ-.𝟷σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # ⴠ-.1σσ ++N; \u200Cⴠ-.𝟷σ𞴺σ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # ⴠ-.1σσ ++B; 𑲘󠄒𓑡。𝟪Ⴜ; [P1 V5 V6]; [P1 V5 V6] ++B; 𑲘󠄒𓑡。8Ⴜ; [P1 V5 V6]; [P1 V5 V6] ++B; 𑲘󠄒𓑡。8ⴜ; [P1 V5 V6]; [P1 V5 V6] ++B; xn--7m3d291b.xn--8-vws; [V5 V6]; [V5 V6] ++B; xn--7m3d291b.xn--8-s1g; [V5 V6]; [V5 V6] ++B; 𑲘󠄒𓑡。𝟪ⴜ; [P1 V5 V6]; [P1 V5 V6] ++B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ ++B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ ++B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ ++B; 䪏\u06AB\u07E0\u0941。뭕ᢝ\u17B9; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ ++B; xn--ekb23dj4at01n.xn--43e96bh910b; [B5 B6]; [B5 B6] # 䪏ګߠु.뭕ᢝឹ ++B; \u1BAB。🂉󠁰; [P1 V5 V6]; [P1 V5 V6] # ᮫.🂉 ++B; \u1BAB。🂉󠁰; [P1 V5 V6]; [P1 V5 V6] # ᮫.🂉 ++B; xn--zxf.xn--fx7ho0250c; [V5 V6]; [V5 V6] # ᮫.🂉 ++T; 󩎃\u0AC4。ς\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 P1 V6] # ૄ.ς𐹮𑈵 ++N; 󩎃\u0AC4。ς\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 C2 P1 V6] # ૄ.ς𐹮𑈵 ++T; 󩎃\u0AC4。Σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 P1 V6] # ૄ.σ𐹮𑈵 ++N; 󩎃\u0AC4。Σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 C2 P1 V6] # ૄ.σ𐹮𑈵 ++T; 󩎃\u0AC4。σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 P1 V6] # ૄ.σ𐹮𑈵 ++N; 󩎃\u0AC4。σ\u200D𐹮𑈵; [B5 C2 P1 V6]; [B5 C2 P1 V6] # ૄ.σ𐹮𑈵 ++B; xn--dfc53161q.xn--4xa8467k5mc; [B5 V6]; [B5 V6] # 
ૄ.σ𐹮𑈵 ++B; xn--dfc53161q.xn--4xa895lzo7nsfd; [B5 C2 V6]; [B5 C2 V6] # ૄ.σ𐹮𑈵 ++B; xn--dfc53161q.xn--3xa006lzo7nsfd; [B5 C2 V6]; [B5 C2 V6] # ૄ.ς𐹮𑈵 ++B; 𐫀ᡂ𑜫.𑘿; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] ++B; 𐫀ᡂ𑜫.𑘿; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] ++B; xn--17e9625js1h.xn--sb2d; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] ++T; 󬚶󸋖򖩰-。\u200C; [C1 P1 V3 V6]; [P1 V3 V6] # -. ++N; 󬚶󸋖򖩰-。\u200C; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -. ++B; xn----7i12hu122k9ire.; [V3 V6]; [V3 V6] ++B; xn----7i12hu122k9ire.xn--0ug; [C1 V3 V6]; [C1 V3 V6] # -. ++B; 𐹣.\u07C2; [B1]; [B1] # 𐹣.߂ ++B; 𐹣.\u07C2; [B1]; [B1] # 𐹣.߂ ++B; xn--bo0d.xn--dsb; [B1]; [B1] # 𐹣.߂ ++B; -\u07E1。Ↄ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ߡ.Ↄ ++B; -\u07E1。Ↄ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ߡ.Ↄ ++B; -\u07E1。ↄ; [B1 V3]; [B1 V3] # -ߡ.ↄ ++B; xn----8cd.xn--r5g; [B1 V3]; [B1 V3] # -ߡ.ↄ ++B; xn----8cd.xn--q5g; [B1 V3 V6]; [B1 V3 V6] # -ߡ.Ↄ ++B; -\u07E1。ↄ; [B1 V3]; [B1 V3] # -ߡ.ↄ ++T; \u200D-︒󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ß哑 ++N; \u200D-︒󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ß哑 ++T; \u200D-。󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ß哑 ++N; \u200D-。󠄄。ß哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ß哑 ++T; \u200D-。󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ss哑 ++N; \u200D-。󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ss哑 ++T; \u200D-。󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ss哑 ++N; \u200D-。󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ss哑 ++T; \u200D-。󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 P1 V3 V6 A4_2] # -..ss哑 ++N; \u200D-。󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2]; [B1 B5 B6 C1 C2 P1 V3 V6 A4_2] # -..ss哑 ++B; -..xn--ss-h46c5711e; [B1 B5 B6 V3 V6 A4_2]; [B1 B5 B6 V3 V6 A4_2] ++B; xn----tgn..xn--ss-k1ts75zb8ym; [B1 B5 B6 C1 C2 V3 V6 A4_2]; [B1 B5 B6 C1 C2 V3 V6 A4_2] # 
-..ss哑 ++B; xn----tgn..xn--zca670n5f0binyk; [B1 B5 B6 C1 C2 V3 V6 A4_2]; [B1 B5 B6 C1 C2 V3 V6 A4_2] # -..ß哑 ++T; \u200D-︒󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ss哑 ++N; \u200D-︒󠄄。SS哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ss哑 ++T; \u200D-︒󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ss哑 ++N; \u200D-︒󠄄。ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ss哑 ++T; \u200D-︒󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 P1 V3 V6] # -︒.ss哑 ++N; \u200D-︒󠄄。Ss哑\u200C𐵿; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # -︒.ss哑 ++B; xn----o89h.xn--ss-h46c5711e; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] ++B; xn----tgnt341h.xn--ss-k1ts75zb8ym; [B1 B5 B6 C1 C2 V6]; [B1 B5 B6 C1 C2 V6] # -︒.ss哑 ++B; xn----tgnt341h.xn--zca670n5f0binyk; [B1 B5 B6 C1 C2 V6]; [B1 B5 B6 C1 C2 V6] # -︒.ß哑 ++B; ︒.\uFE2F𑑂; [P1 V5 V6]; [P1 V5 V6] # ︒.𑑂︯ ++B; ︒.𑑂\uFE2F; [P1 V5 V6]; [P1 V5 V6] # ︒.𑑂︯ ++B; 。.𑑂\uFE2F; [V5 A4_2]; [V5 A4_2] # ..𑑂︯ ++B; ..xn--s96cu30b; [V5 A4_2]; [V5 A4_2] # ..𑑂︯ ++B; xn--y86c.xn--s96cu30b; [V5 V6]; [V5 V6] # ︒.𑑂︯ ++T; \uA92C。\u200D; [C2 V5]; [V5] # ꤬. ++N; \uA92C。\u200D; [C2 V5]; [C2 V5] # ꤬. ++B; xn--zi9a.; [V5]; [V5] # ꤬. ++B; xn--zi9a.xn--1ug; [C2 V5]; [C2 V5] # ꤬. 
++T; \u200D󠸡。\uFCD7; [B1 C2 P1 V6]; [B1 P1 V6] # .هج ++N; \u200D󠸡。\uFCD7; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .هج ++T; \u200D󠸡。\u0647\u062C; [B1 C2 P1 V6]; [B1 P1 V6] # .هج ++N; \u200D󠸡。\u0647\u062C; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .هج ++B; xn--d356e.xn--rgb7c; [B1 V6]; [B1 V6] # .هج ++B; xn--1ug80651l.xn--rgb7c; [B1 C2 V6]; [B1 C2 V6] # .هج ++T; -Ⴄ𝟢\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς ++N; -Ⴄ𝟢\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς ++T; -Ⴄ0\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς ++N; -Ⴄ0\u0663.𑍴ς; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴ς ++T; -ⴄ0\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς ++N; -ⴄ0\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς ++B; -Ⴄ0\u0663.𑍴Σ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴σ ++B; -ⴄ0\u0663.𑍴σ; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴σ ++B; xn---0-iyd8660b.xn--4xa9120l; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴σ ++B; xn---0-iyd216h.xn--4xa9120l; [B1 V3 V5 V6]; [B1 V3 V5 V6] # -Ⴄ0٣.𑍴σ ++B; xn---0-iyd8660b.xn--3xa1220l; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς ++B; xn---0-iyd216h.xn--3xa1220l; [B1 V3 V5 V6]; [B1 V3 V5 V6] # -Ⴄ0٣.𑍴ς ++T; -ⴄ𝟢\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς ++N; -ⴄ𝟢\u0663.𑍴ς; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴ς ++B; -Ⴄ𝟢\u0663.𑍴Σ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -Ⴄ0٣.𑍴σ ++B; -ⴄ𝟢\u0663.𑍴σ; [B1 V3 V5]; [B1 V3 V5] # -ⴄ0٣.𑍴σ ++B; 󦈄。-; [P1 V3 V6]; [P1 V3 V6] ++B; xn--xm38e.-; [V3 V6]; [V3 V6] ++T; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ ++N; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ ++T; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ ++N; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ ++T; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ ++N; ⋠𐋮.򶈮\u0F18ß≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ ++T; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ ++N; ≼\u0338𐋮.򶈮\u0F18ß>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ß≯ ++B; ≼\u0338𐋮.򶈮\u0F18SS>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ ++B; ⋠𐋮.򶈮\u0F18SS≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ ++B; ⋠𐋮.򶈮\u0F18ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ ++B; ≼\u0338𐋮.򶈮\u0F18ss>\u0338; [P1 V6]; [P1 V6] # 
⋠𐋮.༘ss≯ ++B; ≼\u0338𐋮.򶈮\u0F18Ss>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ ++B; ⋠𐋮.򶈮\u0F18Ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ ++B; xn--pgh4639f.xn--ss-ifj426nle504a; [V6]; [V6] # ⋠𐋮.༘ss≯ ++B; xn--pgh4639f.xn--zca593eo6oc013y; [V6]; [V6] # ⋠𐋮.༘ß≯ ++B; ≼\u0338𐋮.򶈮\u0F18SS>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ ++B; ⋠𐋮.򶈮\u0F18SS≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ ++B; ⋠𐋮.򶈮\u0F18ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ ++B; ≼\u0338𐋮.򶈮\u0F18ss>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ ++B; ≼\u0338𐋮.򶈮\u0F18Ss>\u0338; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ ++B; ⋠𐋮.򶈮\u0F18Ss≯; [P1 V6]; [P1 V6] # ⋠𐋮.༘ss≯ ++B; 1𐋸\u0664。󠢮\uFBA4񷝊; [B1 P1 V6]; [B1 P1 V6] # 1𐋸٤.ۀ ++B; 1𐋸\u0664。󠢮\u06C0񷝊; [B1 P1 V6]; [B1 P1 V6] # 1𐋸٤.ۀ ++B; 1𐋸\u0664。󠢮\u06D5\u0654񷝊; [B1 P1 V6]; [B1 P1 V6] # 1𐋸٤.ۀ ++B; xn--1-hqc3905q.xn--zkb83268gqee4a; [B1 V6]; [B1 V6] # 1𐋸٤.ۀ ++T; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴Ⴢ ++N; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴Ⴢ ++T; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴Ⴢ ++N; 儭-。𐹴Ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴Ⴢ ++T; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴ⴢ ++N; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴ⴢ ++B; xn----gz7a.xn--qlj9223eywx0b; [B1 B6 V3 V6]; [B1 B6 V3 V6] ++B; xn----gz7a.xn--0ug472cfq0pus98b; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # 儭-.𐹴ⴢ ++B; xn----gz7a.xn--6nd5001kyw98a; [B1 B6 V3 V6]; [B1 B6 V3 V6] ++B; xn----gz7a.xn--6nd249ejl4pusr7b; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # 儭-.𐹴Ⴢ ++T; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 P1 V3 V6] # 儭-.𐹴ⴢ ++N; 儭-。𐹴ⴢ񥳠\u200C; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # 儭-.𐹴ⴢ ++B; 𝟺𐋷\u06B9.𞤭򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 ++B; 4𐋷\u06B9.𞤭򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 ++B; 4𐋷\u06B9.𞤋򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 ++B; xn--4-cvc5384q.xn--le6hi7322b; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 4𐋷ڹ.𞤭 ++B; 𝟺𐋷\u06B9.𞤋򿍡; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 4𐋷ڹ.𞤭 ++B; ≯-ꡋ𑲣.⒈𐹭; [B1 P1 V6]; [B1 P1 V6] ++B; >\u0338-ꡋ𑲣.⒈𐹭; [B1 P1 V6]; [B1 
P1 V6] ++B; ≯-ꡋ𑲣.1.𐹭; [B1 P1 V6]; [B1 P1 V6] ++B; >\u0338-ꡋ𑲣.1.𐹭; [B1 P1 V6]; [B1 P1 V6] ++B; xn----ogox061d5i8d.1.xn--lo0d; [B1 V6]; [B1 V6] ++B; xn----ogox061d5i8d.xn--tsh0666f; [B1 V6]; [B1 V6] ++B; \u0330.󰜱蚀; [P1 V5 V6]; [P1 V5 V6] # ̰.蚀 ++B; \u0330.󰜱蚀; [P1 V5 V6]; [P1 V5 V6] # ̰.蚀 ++B; xn--xta.xn--e91aw9417e; [V5 V6]; [V5 V6] # ̰.蚀 ++T; \uFB39Ⴘ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ ++N; \uFB39Ⴘ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ ++T; \u05D9\u05BCႸ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ ++N; \u05D9\u05BCႸ.𞡼𑇀ß\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ß⃗ ++T; \u05D9\u05BCⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ ++N; \u05D9\u05BCⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ ++B; \u05D9\u05BCႸ.𞡼𑇀SS\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ ++B; \u05D9\u05BCⴘ.𞡼𑇀ss\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ss⃗ ++B; \u05D9\u05BCႸ.𞡼𑇀ss\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ ++B; xn--kdb1d867b.xn--ss-yju5690ken9h; [B2 B3 V6]; [B2 B3 V6] # יּႸ.𞡼𑇀ss⃗ ++B; xn--kdb1d278n.xn--ss-yju5690ken9h; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ss⃗ ++B; xn--kdb1d278n.xn--zca284nhg9nrrxg; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ ++B; xn--kdb1d867b.xn--zca284nhg9nrrxg; [B2 B3 V6]; [B2 B3 V6] # יּႸ.𞡼𑇀ß⃗ ++T; \uFB39ⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ ++N; \uFB39ⴘ.𞡼𑇀ß\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ß⃗ ++B; \uFB39Ⴘ.𞡼𑇀SS\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ ++B; \uFB39ⴘ.𞡼𑇀ss\u20D7; [B2 B3]; [B2 B3] # יּⴘ.𞡼𑇀ss⃗ ++B; \uFB39Ⴘ.𞡼𑇀ss\u20D7; [B2 B3 P1 V6]; [B2 B3 P1 V6] # יּႸ.𞡼𑇀ss⃗ ++B; \u1BA3𐹰򁱓。凬; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᮣ𐹰.凬 ++B; \u1BA3𐹰򁱓。凬; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᮣ𐹰.凬 ++B; xn--rxfz314ilg20c.xn--t9q; [B1 V5 V6]; [B1 V5 V6] # ᮣ𐹰.凬 ++T; 🢟🄈\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [P1 V5 V6] # 🢟🄈ꡎ.྄ ++N; 🢟🄈\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 🢟🄈ꡎ.྄ ++T; 🢟7,\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [P1 V5 V6] # 🢟7,ꡎ.྄ ++N; 🢟7,\u200Dꡎ。\u0F84; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 🢟7,ꡎ.྄ ++B; xn--7,-gh9hg322i.xn--3ed; [P1 V5 V6]; [P1 V5 V6] # 🢟7,ꡎ.྄ ++B; 
xn--7,-n1t0654eqo3o.xn--3ed; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 🢟7,ꡎ.྄ ++B; xn--nc9aq743ds0e.xn--3ed; [V5 V6]; [V5 V6] # 🢟🄈ꡎ.྄ ++B; xn--1ug4874cfd0kbmg.xn--3ed; [C2 V5 V6]; [C2 V5 V6] # 🢟🄈ꡎ.྄ ++B; ꡔ。\u1039ᢇ; [V5]; [V5] # ꡔ.္ᢇ ++B; xn--tc9a.xn--9jd663b; [V5]; [V5] # ꡔ.္ᢇ ++B; \u20EB≮.𝨖; [P1 V5 V6]; [P1 V5 V6] # ⃫≮.𝨖 ++B; \u20EB<\u0338.𝨖; [P1 V5 V6]; [P1 V5 V6] # ⃫≮.𝨖 ++B; xn--e1g71d.xn--772h; [V5 V6]; [V5 V6] # ⃫≮.𝨖 ++B; Ⴢ≯褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ ++B; Ⴢ>\u0338褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ ++B; Ⴢ≯褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ ++B; Ⴢ>\u0338褦.ᠪ\u07EAႾ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴢ≯褦.ᠪߪႾݧ ++B; ⴢ>\u0338褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ ++B; ⴢ≯褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ ++B; xn--hdh433bev8e.xn--rpb5x392bcyt; [B5 B6 V6]; [B5 B6 V6] # ⴢ≯褦.ᠪߪⴞݧ ++B; xn--6nd461g478e.xn--rpb5x49td2h; [B5 B6 V6]; [B5 B6 V6] # Ⴢ≯褦.ᠪߪႾݧ ++B; ⴢ>\u0338褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ ++B; ⴢ≯褦.ᠪ\u07EAⴞ\u0767; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴢ≯褦.ᠪߪⴞݧ ++T; 򊉆󠆒\u200C\uA953。𞤙\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ꥓.𞤻ٻꡘ ++N; 򊉆󠆒\u200C\uA953。𞤙\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ꥓.𞤻ٻꡘ ++T; 򊉆󠆒\u200C\uA953。𞤻\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ꥓.𞤻ٻꡘ ++N; 򊉆󠆒\u200C\uA953。𞤻\u067Bꡘ; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ꥓.𞤻ٻꡘ ++B; xn--3j9al6189a.xn--0ib8893fegvj; [B2 B3 V6]; [B2 B3 V6] # ꥓.𞤻ٻꡘ ++B; xn--0ug8815chtz0e.xn--0ib8893fegvj; [B2 B3 C1 V6]; [B2 B3 C1 V6] # ꥓.𞤻ٻꡘ ++T; \u200C.≯; [C1 P1 V6]; [P1 V6 A4_2] # .≯ ++N; \u200C.≯; [C1 P1 V6]; [C1 P1 V6] # .≯ ++T; \u200C.>\u0338; [C1 P1 V6]; [P1 V6 A4_2] # .≯ ++N; \u200C.>\u0338; [C1 P1 V6]; [C1 P1 V6] # .≯ ++B; .xn--hdh; [V6 A4_2]; [V6 A4_2] ++B; xn--0ug.xn--hdh; [C1 V6]; [C1 V6] # .≯ ++B; 𰅧񣩠-.\uABED-悜; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -.꯭-悜 ++B; 𰅧񣩠-.\uABED-悜; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -.꯭-悜 ++B; xn----7m53aj640l.xn----8f4br83t; [V3 V5 V6]; [V3 V5 V6] # -.꯭-悜 ++T; 
ᡉ𶓧⬞ᢜ.-\u200D𞣑\u202E; [C2 P1 V3 V6]; [P1 V3 V6] # ᡉ⬞ᢜ.-𞣑 ++N; ᡉ𶓧⬞ᢜ.-\u200D𞣑\u202E; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ᡉ⬞ᢜ.-𞣑 ++B; xn--87e0ol04cdl39e.xn----qinu247r; [V3 V6]; [V3 V6] # ᡉ⬞ᢜ.-𞣑 ++B; xn--87e0ol04cdl39e.xn----ugn5e3763s; [C2 V3 V6]; [C2 V3 V6] # ᡉ⬞ᢜ.-𞣑 ++T; ⒐\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # ⒐衃Ⴝ.ڂႴ ++N; ⒐\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # ⒐衃Ⴝ.ڂႴ ++T; 9.\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 9.衃Ⴝ.ڂႴ ++N; 9.\u200C衃Ⴝ.\u0682Ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 9.衃Ⴝ.ڂႴ ++T; 9.\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1]; [B1 B2 B3] # 9.衃ⴝ.ڂⴔ ++N; 9.\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1]; [B1 B2 B3 C1] # 9.衃ⴝ.ڂⴔ ++T; 9.\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # 9.衃Ⴝ.ڂⴔ ++N; 9.\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # 9.衃Ⴝ.ڂⴔ ++B; 9.xn--1nd9032d.xn--7ib268q; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 9.衃Ⴝ.ڂⴔ ++B; 9.xn--1nd159e1y2f.xn--7ib268q; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 9.衃Ⴝ.ڂⴔ ++B; 9.xn--llj1920a.xn--7ib268q; [B1 B2 B3]; [B1 B2 B3] # 9.衃ⴝ.ڂⴔ ++B; 9.xn--0ug862cbm5e.xn--7ib268q; [B1 B2 B3 C1]; [B1 B2 B3 C1] # 9.衃ⴝ.ڂⴔ ++B; 9.xn--1nd9032d.xn--7ib433c; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 9.衃Ⴝ.ڂႴ ++B; 9.xn--1nd159e1y2f.xn--7ib433c; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # 9.衃Ⴝ.ڂႴ ++T; ⒐\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # ⒐衃ⴝ.ڂⴔ ++N; ⒐\u200C衃ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # ⒐衃ⴝ.ڂⴔ ++T; ⒐\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 P1 V6] # ⒐衃Ⴝ.ڂⴔ ++N; ⒐\u200C衃Ⴝ.\u0682ⴔ; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # ⒐衃Ⴝ.ڂⴔ ++B; xn--1nd362hy16e.xn--7ib268q; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ⒐衃Ⴝ.ڂⴔ ++B; xn--1nd159ecmd785k.xn--7ib268q; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # ⒐衃Ⴝ.ڂⴔ ++B; xn--1shy52abz3f.xn--7ib268q; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ⒐衃ⴝ.ڂⴔ ++B; xn--0ugx0px1izu2h.xn--7ib268q; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # ⒐衃ⴝ.ڂⴔ ++B; xn--1nd362hy16e.xn--7ib433c; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ⒐衃Ⴝ.ڂႴ ++B; xn--1nd159ecmd785k.xn--7ib433c; [B1 B2 B3 C1 V6]; 
[B1 B2 B3 C1 V6] # ⒐衃Ⴝ.ڂႴ ++T; \u07E1\u200C。--⸬; [B1 B3 C1 V3]; [B1 V3] # ߡ.--⸬ ++N; \u07E1\u200C。--⸬; [B1 B3 C1 V3]; [B1 B3 C1 V3] # ߡ.--⸬ ++B; xn--8sb.xn-----iw2a; [B1 V3]; [B1 V3] # ߡ.--⸬ ++B; xn--8sb884j.xn-----iw2a; [B1 B3 C1 V3]; [B1 B3 C1 V3] # ߡ.--⸬ ++B; 𞥓.\u0718; 𞥓.\u0718; xn--of6h.xn--inb # 𞥓.ܘ ++B; 𞥓.\u0718; ; xn--of6h.xn--inb # 𞥓.ܘ ++B; xn--of6h.xn--inb; 𞥓.\u0718; xn--of6h.xn--inb # 𞥓.ܘ ++B; 󠄽-.-\u0DCA; [V3]; [V3] # -.-් ++B; 󠄽-.-\u0DCA; [V3]; [V3] # -.-් ++B; -.xn----ptf; [V3]; [V3] # -.-් ++B; 󠇝\u075B-.\u1927; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ݛ-.ᤧ ++B; xn----k4c.xn--lff; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ݛ-.ᤧ ++B; 𞤴󠆹⦉𐹺.\uA806⒌󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆⒌ ++B; 𞤴󠆹⦉𐹺.\uA8065.󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆5. ++B; 𞤒󠆹⦉𐹺.\uA8065.󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆5. ++B; xn--fuix729epewf.xn--5-w93e.xn--7b83e; [B1 V5 V6]; [B1 V5 V6] # 𞤴⦉𐹺.꠆5. ++B; 𞤒󠆹⦉𐹺.\uA806⒌󘤸; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴⦉𐹺.꠆⒌ ++B; xn--fuix729epewf.xn--xsh5029b6e77i; [B1 V5 V6]; [B1 V5 V6] # 𞤴⦉𐹺.꠆⒌ ++T; 󠄸₀。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 V5] # 0.𑖿𐦂 ++N; 󠄸₀。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 C2 V5] # 0.𑖿𐦂 ++T; 󠄸0。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 V5] # 0.𑖿𐦂 ++N; 󠄸0。𑖿\u200C𐦂\u200D; [B1 C2 V5]; [B1 C2 V5] # 0.𑖿𐦂 ++B; 0.xn--mn9cz2s; [B1 V5]; [B1 V5] ++B; 0.xn--0ugc8040p9hk; [B1 C2 V5]; [B1 C2 V5] # 0.𑖿𐦂 ++B; Ⴚ𐋸󠄄。𝟝ퟶ\u103A; [P1 V6]; [P1 V6] # Ⴚ𐋸.5ퟶ် ++B; Ⴚ𐋸󠄄。5ퟶ\u103A; [P1 V6]; [P1 V6] # Ⴚ𐋸.5ퟶ် ++B; ⴚ𐋸󠄄。5ퟶ\u103A; ⴚ𐋸.5ퟶ\u103A; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် ++B; xn--ilj2659d.xn--5-dug9054m; ⴚ𐋸.5ퟶ\u103A; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် ++B; ⴚ𐋸.5ퟶ\u103A; ; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် ++B; Ⴚ𐋸.5ퟶ\u103A; [P1 V6]; [P1 V6] # Ⴚ𐋸.5ퟶ် ++B; xn--ynd2415j.xn--5-dug9054m; [V6]; [V6] # Ⴚ𐋸.5ퟶ် ++B; ⴚ𐋸󠄄。𝟝ퟶ\u103A; ⴚ𐋸.5ퟶ\u103A; xn--ilj2659d.xn--5-dug9054m; NV8 # ⴚ𐋸.5ퟶ် ++T; \u200D-ᠹ﹪.\u1DE1\u1922; [C2 P1 V5 V6]; [P1 V3 V5 V6] # -ᠹ﹪.ᷡᤢ ++N; \u200D-ᠹ﹪.\u1DE1\u1922; [C2 P1 V5 V6]; [C2 P1 V5 V6] # -ᠹ﹪.ᷡᤢ ++T; \u200D-ᠹ%.\u1DE1\u1922; [C2 P1 V5 V6]; [P1 V3 
V5 V6] # -ᠹ%.ᷡᤢ ++N; \u200D-ᠹ%.\u1DE1\u1922; [C2 P1 V5 V6]; [C2 P1 V5 V6] # -ᠹ%.ᷡᤢ ++B; xn---%-u4o.xn--gff52t; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -ᠹ%.ᷡᤢ ++B; xn---%-u4oy48b.xn--gff52t; [C2 P1 V5 V6]; [C2 P1 V5 V6] # -ᠹ%.ᷡᤢ ++B; xn----c6jx047j.xn--gff52t; [V3 V5 V6]; [V3 V5 V6] # -ᠹ﹪.ᷡᤢ ++B; xn----c6j614b1z4v.xn--gff52t; [C2 V5 V6]; [C2 V5 V6] # -ᠹ﹪.ᷡᤢ ++B; ≠.ᠿ; [P1 V6]; [P1 V6] ++B; =\u0338.ᠿ; [P1 V6]; [P1 V6] ++B; xn--1ch.xn--y7e; [V6]; [V6] ++B; \u0723\u05A3。㌪; \u0723\u05A3.ハイツ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ ++B; \u0723\u05A3。ハイツ; \u0723\u05A3.ハイツ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ ++B; xn--ucb18e.xn--eck4c5a; \u0723\u05A3.ハイツ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ ++B; \u0723\u05A3.ハイツ; ; xn--ucb18e.xn--eck4c5a # ܣ֣.ハイツ ++B; 𞷥󠆀≮.\u2D7F-; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] # ≮.⵿- ++B; 𞷥󠆀<\u0338.\u2D7F-; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] # ≮.⵿- ++B; xn--gdhx802p.xn----i2s; [B1 B3 V3 V5 V6]; [B1 B3 V3 V5 V6] # ≮.⵿- ++B; ₆榎򦖎\u0D4D。𞤅\u06ED\uFC5A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي ++B; 6榎򦖎\u0D4D。𞤅\u06ED\u064A\u064A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي ++B; 6榎򦖎\u0D4D。𞤧\u06ED\u064A\u064A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي ++B; xn--6-kmf4691ejv41j.xn--mhba10ch545mn8v8h; [B1 B3 V6]; [B1 B3 V6] # 6榎്.𞤧ۭيي ++B; ₆榎򦖎\u0D4D。𞤧\u06ED\uFC5A󠮨; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 6榎്.𞤧ۭيي ++B; 𣩫.򌑲; [P1 V6]; [P1 V6] ++B; 𣩫.򌑲; [P1 V6]; [P1 V6] ++B; xn--td3j.xn--4628b; [V6]; [V6] ++T; \u200D︒。\u06B9\u200C; [B1 B3 C1 C2 P1 V6]; [B1 P1 V6] # ︒.ڹ ++N; \u200D︒。\u06B9\u200C; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # ︒.ڹ ++B; xn--y86c.xn--skb; [B1 V6]; [B1 V6] # ︒.ڹ ++B; xn--1ug2658f.xn--skb080k; [B1 B3 C1 C2 V6]; [B1 B3 C1 C2 V6] # ︒.ڹ ++B; xn--skb; \u06B9; xn--skb # ڹ ++B; \u06B9; ; xn--skb # ڹ ++T; 𐹦\u200C𐹶。\u206D; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹦𐹶. ++N; 𐹦\u200C𐹶。\u206D; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹦𐹶. ++B; xn--eo0d6a.xn--sxg; [B1 V6]; [B1 V6] # 𐹦𐹶. ++B; xn--0ug4994goba.xn--sxg; [B1 C1 V6]; [B1 C1 V6] # 𐹦𐹶. 
++B; \u0C4D𝨾\u05A9𝟭。-𑜨; [V3 V5]; [V3 V5] # ్𝨾֩1.-𑜨 ++B; \u0C4D𝨾\u05A91。-𑜨; [V3 V5]; [V3 V5] # ్𝨾֩1.-𑜨 ++B; xn--1-rfc312cdp45c.xn----nq0j; [V3 V5]; [V3 V5] # ్𝨾֩1.-𑜨 ++B; 򣿈。뙏; [P1 V6]; [P1 V6] ++B; 򣿈。뙏; [P1 V6]; [P1 V6] ++B; xn--ph26c.xn--281b; [V6]; [V6] ++B; 񕨚󠄌󑽀ᡀ.\u08B6; [P1 V6]; [P1 V6] # ᡀ.ࢶ ++B; xn--z7e98100evc01b.xn--czb; [V6]; [V6] # ᡀ.ࢶ ++T; \u200D。񅁛; [C2 P1 V6]; [P1 V6 A4_2] # . ++N; \u200D。񅁛; [C2 P1 V6]; [C2 P1 V6] # . ++T; \u200D。񅁛; [C2 P1 V6]; [P1 V6 A4_2] # . ++N; \u200D。񅁛; [C2 P1 V6]; [C2 P1 V6] # . ++B; .xn--6x4u; [V6 A4_2]; [V6 A4_2] ++B; xn--1ug.xn--6x4u; [C2 V6]; [C2 V6] # . ++B; \u084B皥.-; [B1 B2 B3 V3]; [B1 B2 B3 V3] # ࡋ皥.- ++B; \u084B皥.-; [B1 B2 B3 V3]; [B1 B2 B3 V3] # ࡋ皥.- ++B; xn--9vb4167c.-; [B1 B2 B3 V3]; [B1 B2 B3 V3] # ࡋ皥.- ++B; 𐣸\u0315𐮇.⒈ꡦ; [B1 P1 V6]; [B1 P1 V6] # ̕𐮇.⒈ꡦ ++B; 𐣸\u0315𐮇.1.ꡦ; [B1 P1 V6]; [B1 P1 V6] # ̕𐮇.1.ꡦ ++B; xn--5sa9915kgvb.1.xn--cd9a; [B1 V6]; [B1 V6] # ̕𐮇.1.ꡦ ++B; xn--5sa9915kgvb.xn--tshw539b; [B1 V6]; [B1 V6] # ̕𐮇.⒈ꡦ ++T; Ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 ++N; Ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 ++T; Ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 ++N; Ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 ++T; Ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 ++N; Ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 ++T; Ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 ++N; Ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 ++T; ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 ++N; ⴛ\u200C\u05A2\u200D。\u1160a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 ++T; ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 ++N; ⴛ\u200C\u05A2\u200D。\u1160ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 ++T; 
Ⴛ\u200C\u05A2\u200D。\u1160Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 ++N; Ⴛ\u200C\u05A2\u200D。\u1160Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 ++T; Ⴛ\u200C\u05A2\u200D。\u1160A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 ++N; Ⴛ\u200C\u05A2\u200D。\u1160A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 ++B; xn--tcb597c.xn--yda594fdn5q; [B5 B6 V6]; [B5 B6 V6] # Ⴛ֢.ā𐹦 ++B; xn--tcb597cdmmfa.xn--yda594fdn5q; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # Ⴛ֢.ā𐹦 ++B; xn--tcb323r.xn--yda594fdn5q; [B5 B6 V6]; [B5 B6 V6] # ⴛ֢.ā𐹦 ++B; xn--tcb736kea974k.xn--yda594fdn5q; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # ⴛ֢.ā𐹦 ++T; ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 ++N; ⴛ\u200C\u05A2\u200D。\uFFA0a\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 ++T; ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ⴛ֢.ā𐹦 ++N; ⴛ\u200C\u05A2\u200D。\uFFA0ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ⴛ֢.ā𐹦 ++T; Ⴛ\u200C\u05A2\u200D。\uFFA0Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 ++N; Ⴛ\u200C\u05A2\u200D。\uFFA0Ā𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 ++T; Ⴛ\u200C\u05A2\u200D。\uFFA0A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # Ⴛ֢.ā𐹦 ++N; Ⴛ\u200C\u05A2\u200D。\uFFA0A\u0304𐹦; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # Ⴛ֢.ā𐹦 ++B; xn--tcb597c.xn--yda9741khjj; [B5 B6 V6]; [B5 B6 V6] # Ⴛ֢.ā𐹦 ++B; xn--tcb597cdmmfa.xn--yda9741khjj; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # Ⴛ֢.ā𐹦 ++B; xn--tcb323r.xn--yda9741khjj; [B5 B6 V6]; [B5 B6 V6] # ⴛ֢.ā𐹦 ++B; xn--tcb736kea974k.xn--yda9741khjj; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # ⴛ֢.ā𐹦 ++T; \uFFF9\u200C。曳⾑𐋰≯; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ ++N; \uFFF9\u200C。曳⾑𐋰≯; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ ++T; \uFFF9\u200C。曳⾑𐋰>\u0338; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ ++N; \uFFF9\u200C。曳⾑𐋰>\u0338; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ ++T; \uFFF9\u200C。曳襾𐋰≯; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ ++N; \uFFF9\u200C。曳襾𐋰≯; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ ++T; \uFFF9\u200C。曳襾𐋰>\u0338; [C1 P1 V6]; [P1 V6] # .曳襾𐋰≯ ++N; 
\uFFF9\u200C。曳襾𐋰>\u0338; [C1 P1 V6]; [C1 P1 V6] # .曳襾𐋰≯ ++B; xn--vn7c.xn--hdh501y8wvfs5h; [V6]; [V6] # .曳襾𐋰≯ ++B; xn--0ug2139f.xn--hdh501y8wvfs5h; [C1 V6]; [C1 V6] # .曳襾𐋰≯ ++T; ≯⒈。ß; [P1 V6]; [P1 V6] ++N; ≯⒈。ß; [P1 V6]; [P1 V6] ++T; >\u0338⒈。ß; [P1 V6]; [P1 V6] ++N; >\u0338⒈。ß; [P1 V6]; [P1 V6] ++T; ≯1.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] ++N; ≯1.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] ++T; >\u03381.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] ++N; >\u03381.。ß; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; >\u03381.。SS; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; ≯1.。SS; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; ≯1.。ss; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; >\u03381.。ss; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; >\u03381.。Ss; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; ≯1.。Ss; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; xn--1-ogo..ss; [V6 A4_2]; [V6 A4_2] ++B; xn--1-ogo..xn--zca; [V6 A4_2]; [V6 A4_2] ++B; >\u0338⒈。SS; [P1 V6]; [P1 V6] ++B; ≯⒈。SS; [P1 V6]; [P1 V6] ++B; ≯⒈。ss; [P1 V6]; [P1 V6] ++B; >\u0338⒈。ss; [P1 V6]; [P1 V6] ++B; >\u0338⒈。Ss; [P1 V6]; [P1 V6] ++B; ≯⒈。Ss; [P1 V6]; [P1 V6] ++B; xn--hdh84f.ss; [V6]; [V6] ++B; xn--hdh84f.xn--zca; [V6]; [V6] ++T; \u0667\u200D\uFB96。\u07DA-₆Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 P1 V6] # ٧ڳ.ߚ-6Ⴙ ++N; \u0667\u200D\uFB96。\u07DA-₆Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 C2 P1 V6] # ٧ڳ.ߚ-6Ⴙ ++T; \u0667\u200D\u06B3。\u07DA-6Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 P1 V6] # ٧ڳ.ߚ-6Ⴙ ++N; \u0667\u200D\u06B3。\u07DA-6Ⴙ; [B1 B2 B3 C2 P1 V6]; [B1 B2 B3 C2 P1 V6] # ٧ڳ.ߚ-6Ⴙ ++T; \u0667\u200D\u06B3。\u07DA-6ⴙ; [B1 B2 B3 C2]; [B1 B2 B3] # ٧ڳ.ߚ-6ⴙ ++N; \u0667\u200D\u06B3。\u07DA-6ⴙ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # ٧ڳ.ߚ-6ⴙ ++B; xn--gib6m.xn---6-lve6529a; [B1 B2 B3]; [B1 B2 B3] # ٧ڳ.ߚ-6ⴙ ++B; xn--gib6m343e.xn---6-lve6529a; [B1 B2 B3 C2]; [B1 B2 B3 C2] # ٧ڳ.ߚ-6ⴙ ++B; xn--gib6m.xn---6-lve002g; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ٧ڳ.ߚ-6Ⴙ ++B; xn--gib6m343e.xn---6-lve002g; [B1 B2 B3 C2 V6]; [B1 B2 B3 C2 V6] # ٧ڳ.ߚ-6Ⴙ ++T; \u0667\u200D\uFB96。\u07DA-₆ⴙ; [B1 B2 B3 C2]; [B1 B2 B3] # ٧ڳ.ߚ-6ⴙ ++N; \u0667\u200D\uFB96。\u07DA-₆ⴙ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # ٧ڳ.ߚ-6ⴙ ++T; 
\u200C。≠; [C1 P1 V6]; [P1 V6 A4_2] # .≠ ++N; \u200C。≠; [C1 P1 V6]; [C1 P1 V6] # .≠ ++T; \u200C。=\u0338; [C1 P1 V6]; [P1 V6 A4_2] # .≠ ++N; \u200C。=\u0338; [C1 P1 V6]; [C1 P1 V6] # .≠ ++T; \u200C。≠; [C1 P1 V6]; [P1 V6 A4_2] # .≠ ++N; \u200C。≠; [C1 P1 V6]; [C1 P1 V6] # .≠ ++T; \u200C。=\u0338; [C1 P1 V6]; [P1 V6 A4_2] # .≠ ++N; \u200C。=\u0338; [C1 P1 V6]; [C1 P1 V6] # .≠ ++B; .xn--1ch; [V6 A4_2]; [V6 A4_2] ++B; xn--0ug.xn--1ch; [C1 V6]; [C1 V6] # .≠ ++T; 𑖿𝨔.ᡟ𑖿\u1B42\u200C; [C1 V5]; [V5] # 𑖿𝨔.ᡟ𑖿ᭂ ++N; 𑖿𝨔.ᡟ𑖿\u1B42\u200C; [C1 V5]; [C1 V5] # 𑖿𝨔.ᡟ𑖿ᭂ ++B; xn--461dw464a.xn--v8e29loy65a; [V5]; [V5] # 𑖿𝨔.ᡟ𑖿ᭂ ++B; xn--461dw464a.xn--v8e29ldzfo952a; [C1 V5]; [C1 V5] # 𑖿𝨔.ᡟ𑖿ᭂ ++T; 򔣳\u200D򑝱.𖬴Ↄ≠-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴Ↄ≠- ++N; 򔣳\u200D򑝱.𖬴Ↄ≠-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴Ↄ≠- ++T; 򔣳\u200D򑝱.𖬴Ↄ=\u0338-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴Ↄ≠- ++N; 򔣳\u200D򑝱.𖬴Ↄ=\u0338-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴Ↄ≠- ++T; 򔣳\u200D򑝱.𖬴ↄ=\u0338-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴ↄ≠- ++N; 򔣳\u200D򑝱.𖬴ↄ=\u0338-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴ↄ≠- ++T; 򔣳\u200D򑝱.𖬴ↄ≠-; [C2 P1 V3 V5 V6]; [P1 V3 V5 V6] # .𖬴ↄ≠- ++N; 򔣳\u200D򑝱.𖬴ↄ≠-; [C2 P1 V3 V5 V6]; [C2 P1 V3 V5 V6] # .𖬴ↄ≠- ++B; xn--6j00chy9a.xn----81n51bt713h; [V3 V5 V6]; [V3 V5 V6] ++B; xn--1ug15151gkb5a.xn----81n51bt713h; [C2 V3 V5 V6]; [C2 V3 V5 V6] # .𖬴ↄ≠- ++B; xn--6j00chy9a.xn----61n81bt713h; [V3 V5 V6]; [V3 V5 V6] ++B; xn--1ug15151gkb5a.xn----61n81bt713h; [C2 V3 V5 V6]; [C2 V3 V5 V6] # .𖬴Ↄ≠- ++T; \u07E2ς\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢς7.蔑 ++N; \u07E2ς\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢς7.蔑 ++T; \u07E2ς\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢς7.蔑 ++N; \u07E2ς\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢς7.蔑 ++T; \u07E2Σ\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 ++N; \u07E2Σ\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 ++T; \u07E2σ\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 ++N; \u07E2σ\u200D7。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 ++B; xn--7-zmb872a.xn--wy1ao4929b; [B2 V6]; [B2 V6] # 
ߢσ7.蔑 ++B; xn--7-zmb872aez5a.xn--wy1ao4929b; [B2 C2 V6]; [B2 C2 V6] # ߢσ7.蔑 ++B; xn--7-xmb182aez5a.xn--wy1ao4929b; [B2 C2 V6]; [B2 C2 V6] # ߢς7.蔑 ++T; \u07E2Σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 ++N; \u07E2Σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 ++T; \u07E2σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 P1 V6] # ߢσ7.蔑 ++N; \u07E2σ\u200D𝟳。蔑򛖢; [B2 C2 P1 V6]; [B2 C2 P1 V6] # ߢσ7.蔑 ++B; 𐹰.\u0600; [B1 P1 V6]; [B1 P1 V6] # 𐹰. ++B; xn--oo0d.xn--ifb; [B1 V6]; [B1 V6] # 𐹰. ++B; -\u08A8.𱠖; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ࢨ. ++B; xn----mod.xn--5o9n; [B1 V3 V6]; [B1 V3 V6] # -ࢨ. ++B; ≯𞱸󠇀。誆⒈; [B1 P1 V6]; [B1 P1 V6] ++B; >\u0338𞱸󠇀。誆⒈; [B1 P1 V6]; [B1 P1 V6] ++B; ≯𞱸󠇀。誆1.; [B1 P1 V6]; [B1 P1 V6] ++B; >\u0338𞱸󠇀。誆1.; [B1 P1 V6]; [B1 P1 V6] ++B; xn--hdh7151p.xn--1-dy1d.; [B1 V6]; [B1 V6] ++B; xn--hdh7151p.xn--tsh1248a; [B1 V6]; [B1 V6] ++B; \u0616𞥙䐊\u0650.︒\u0645↺\u069C; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ؖ𞥙䐊ِ.︒م↺ڜ ++B; \u0616𞥙䐊\u0650.。\u0645↺\u069C; [B1 V5 A4_2]; [B1 V5 A4_2] # ؖ𞥙䐊ِ..م↺ڜ ++B; xn--4fb0j490qjg4x..xn--hhb8o948e; [B1 V5 A4_2]; [B1 V5 A4_2] # ؖ𞥙䐊ِ..م↺ڜ ++B; xn--4fb0j490qjg4x.xn--hhb8o948euo5r; [B1 V5 V6]; [B1 V5 V6] # ؖ𞥙䐊ِ.︒م↺ڜ ++T; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ ++N; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ ++T; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ ++N; 퀬-\uDF7E񶳒.\u200C\u0AC5󩸤۴; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.xn--hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.xn--hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.XN--HMB76Q74166B; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.XN--HMB76Q74166B; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.Xn--Hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.Xn--Hmb76q74166b; [P1 V5 V6]; [P1 V5 V6 A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.xn--hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.xn--hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.XN--HMB76Q48Y18505A; [C1 P1 V6]; [C1 P1 V6 
A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.XN--HMB76Q48Y18505A; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.Xn--Hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ ++B; 퀬-\uDF7E񶳒.Xn--Hmb76q48y18505a; [C1 P1 V6]; [C1 P1 V6 A3] # 퀬-.ૅ۴ ++B; Ⴌ.𐹾︒𑁿𞾄; [B1 P1 V6]; [B1 P1 V6] ++B; Ⴌ.𐹾。𑁿𞾄; [B1 P1 V5 V6]; [B1 P1 V5 V6] ++B; ⴌ.𐹾。𑁿𞾄; [B1 P1 V5 V6]; [B1 P1 V5 V6] ++B; xn--3kj.xn--2o0d.xn--q30dg029a; [B1 V5 V6]; [B1 V5 V6] ++B; xn--knd.xn--2o0d.xn--q30dg029a; [B1 V5 V6]; [B1 V5 V6] ++B; ⴌ.𐹾︒𑁿𞾄; [B1 P1 V6]; [B1 P1 V6] ++B; xn--3kj.xn--y86c030a9ob6374b; [B1 V6]; [B1 V6] ++B; xn--knd.xn--y86c030a9ob6374b; [B1 V6]; [B1 V6] ++B; 񧞿╏。𞩕󠁾; [B3 B6 P1 V6]; [B3 B6 P1 V6] ++B; xn--iyh90030d.xn--1m6hs0260c; [B3 B6 V6]; [B3 B6 V6] ++T; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [V5] # ┮.ఀ్᜴ ++N; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [C2 V5] # ┮.ఀ్᜴ ++T; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [V5] # ┮.ఀ్᜴ ++N; \u200D┮󠇐.\u0C00\u0C4D\u1734\u200D; [C2 V5]; [C2 V5] # ┮.ఀ్᜴ ++B; xn--kxh.xn--eoc8m432a; [V5]; [V5] # ┮.ఀ్᜴ ++B; xn--1ug04r.xn--eoc8m432a40i; [C2 V5]; [C2 V5] # ┮.ఀ్᜴ ++B; 򹚪。🄂; [P1 V6]; [P1 V6] ++B; 򹚪。1,; [P1 V6]; [P1 V6] ++B; xn--n433d.1,; [P1 V6]; [P1 V6] ++B; xn--n433d.xn--v07h; [V6]; [V6] ++B; 𑍨刍.🛦; [V5]; [V5] ++B; xn--rbry728b.xn--y88h; [V5]; [V5] ++B; 󠌏3。\u1BF1𝟒; [P1 V5 V6]; [P1 V5 V6] # 3.ᯱ4 ++B; 󠌏3。\u1BF14; [P1 V5 V6]; [P1 V5 V6] # 3.ᯱ4 ++B; xn--3-ib31m.xn--4-pql; [V5 V6]; [V5 V6] # 3.ᯱ4 ++T; \u06876Ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ڇ6Ⴔ辘.صيڇ ++N; \u06876Ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ڇ6Ⴔ辘.صيڇ ++T; \u06876Ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ڇ6Ⴔ辘.صيڇ ++N; \u06876Ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ڇ6Ⴔ辘.صيڇ ++T; \u06876ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1]; [B2 B3] # ڇ6ⴔ辘.صيڇ ++N; \u06876ⴔ辘.\u0635\u064A\u0687\u200C; [B2 B3 C1]; [B2 B3 C1] # ڇ6ⴔ辘.صيڇ ++B; xn--6-gsc2270akm6f.xn--0gb6bxk; [B2 B3]; [B2 B3] # ڇ6ⴔ辘.صيڇ ++B; xn--6-gsc2270akm6f.xn--0gb6bxkx18g; [B2 B3 C1]; [B2 B3 
C1] # ڇ6ⴔ辘.صيڇ ++B; xn--6-gsc039eqq6k.xn--0gb6bxk; [B2 B3 V6]; [B2 B3 V6] # ڇ6Ⴔ辘.صيڇ ++B; xn--6-gsc039eqq6k.xn--0gb6bxkx18g; [B2 B3 C1 V6]; [B2 B3 C1 V6] # ڇ6Ⴔ辘.صيڇ ++T; \u06876ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1]; [B2 B3] # ڇ6ⴔ辘.صيڇ ++N; \u06876ⴔ辘.\uFD22\u0687\u200C; [B2 B3 C1]; [B2 B3 C1] # ڇ6ⴔ辘.صيڇ ++B; 󠄍.𐮭𞰬򻫞۹; [B2 P1 V6 A4_2]; [B2 P1 V6 A4_2] ++B; .xn--mmb3954kd0uf1zx7f; [B2 V6 A4_2]; [B2 V6 A4_2] ++B; \uA87D≯.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. ++B; \uA87D>\u0338.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. ++B; \uA87D≯.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. ++B; \uA87D>\u0338.򻲀򒳄; [P1 V6]; [P1 V6] # ≯. ++B; xn--hdh8193c.xn--5z40cp629b; [V6]; [V6] # ≯. ++T; ςო\u067B.ς\u0714; [B5 B6]; [B5 B6] # ςოٻ.ςܔ ++N; ςო\u067B.ς\u0714; [B5 B6]; [B5 B6] # ςოٻ.ςܔ ++B; Σო\u067B.Σ\u0714; [B5 B6]; [B5 B6] # σოٻ.σܔ ++B; σო\u067B.σ\u0714; [B5 B6]; [B5 B6] # σოٻ.σܔ ++B; Σო\u067B.σ\u0714; [B5 B6]; [B5 B6] # σოٻ.σܔ ++B; xn--4xa60l26n.xn--4xa21o; [B5 B6]; [B5 B6] # σოٻ.σܔ ++T; Σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ ++N; Σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ ++T; σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ ++N; σო\u067B.ς\u0714; [B5 B6]; [B5 B6] # σოٻ.ςܔ ++B; xn--4xa60l26n.xn--3xa41o; [B5 B6]; [B5 B6] # σოٻ.ςܔ ++B; xn--3xa80l26n.xn--3xa41o; [B5 B6]; [B5 B6] # ςოٻ.ςܔ ++B; 򄖚\u0748𠄯\u075F。󠛩; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ݈𠄯ݟ. ++B; 򄖚\u0748𠄯\u075F。󠛩; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ݈𠄯ݟ. ++B; xn--vob0c4369twfv8b.xn--kl46e; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ݈𠄯ݟ. 
++T; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ ++N; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ ++T; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ ++N; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ ++T; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ ++N; 󠳛.\u200D䤫≠Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ ++T; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠Ⴞ ++N; 󠳛.\u200D䤫=\u0338Ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠Ⴞ ++T; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ ++N; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ ++T; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ ++N; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ ++B; xn--1t56e.xn--1ch153bqvw; [V6]; [V6] ++B; xn--1t56e.xn--1ug73gzzpwi3a; [C2 V6]; [C2 V6] # .䤫≠ⴞ ++B; xn--1t56e.xn--2nd141ghl2a; [V6]; [V6] ++B; xn--1t56e.xn--2nd159e9vb743e; [C2 V6]; [C2 V6] # .䤫≠Ⴞ ++T; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ ++N; 󠳛.\u200D䤫=\u0338ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ ++T; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [P1 V6] # .䤫≠ⴞ ++N; 󠳛.\u200D䤫≠ⴞ; [C2 P1 V6]; [C2 P1 V6] # .䤫≠ⴞ ++B; 𐽘𑈵.𐹣🕥; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] ++B; 𐽘𑈵.𐹣🕥; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] ++B; xn--bv0d02c.xn--bo0dq650b; [B1 B2 B3 V6]; [B1 B2 B3 V6] ++B; ⒊⒈𑁄。9; [P1 V6]; [P1 V6] ++B; 3.1.𑁄。9; [V5]; [V5] ++B; 3.1.xn--110d.9; [V5]; [V5] ++B; xn--tshd3512p.9; [V6]; [V6] ++T; -\u200C\u2DF1≮.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 ++N; -\u200C\u2DF1≮.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 ++T; -\u200C\u2DF1<\u0338.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 ++N; -\u200C\u2DF1<\u0338.𐹱򭏴4₉; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 ++T; -\u200C\u2DF1≮.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 ++N; -\u200C\u2DF1≮.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 ++T; -\u200C\u2DF1<\u0338.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # -ⷱ≮.𐹱49 ++N; -\u200C\u2DF1<\u0338.𐹱򭏴49; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # -ⷱ≮.𐹱49 ++B; xn----ngo823c.xn--49-ki3om2611f; [B1 V3 V6]; [B1 V3 V6] # -ⷱ≮.𐹱49 ++B; xn----sgn20i14s.xn--49-ki3om2611f; [B1 C1 V3 V6]; [B1 C1 V3 V6] # -ⷱ≮.𐹱49 
++B; -≯딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ ++B; ->\u0338딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ ++B; -≯딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ ++B; ->\u0338딾。\u0847; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≯딾.ࡇ ++B; xn----pgow547d.xn--5vb; [B1 V3 V6]; [B1 V3 V6] # -≯딾.ࡇ ++T; 𑙢⒈𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𑙢⒈𐹠-. ++N; 𑙢⒈𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𑙢⒈𐹠-. ++T; 𑙢1.𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𑙢1.𐹠-. ++N; 𑙢1.𐹠-。󠗐\u200C; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𑙢1.𐹠-. ++B; xn--1-bf0j.xn----516i.xn--jd46e; [B1 V3 V6]; [B1 V3 V6] ++B; xn--1-bf0j.xn----516i.xn--0ug23321l; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𑙢1.𐹠-. ++B; xn----dcpy090hiyg.xn--jd46e; [B1 V3 V6]; [B1 V3 V6] ++B; xn----dcpy090hiyg.xn--0ug23321l; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𑙢⒈𐹠-. ++B; \u034A.𐨎; [V5]; [V5] # ͊.𐨎 ++B; \u034A.𐨎; [V5]; [V5] # ͊.𐨎 ++B; xn--oua.xn--mr9c; [V5]; [V5] # ͊.𐨎 ++B; 훉≮。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ ++B; 훉<\u0338。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ ++B; 훉≮。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ ++B; 훉<\u0338。\u0E34; [P1 V5 V6]; [P1 V5 V6] # 훉≮.ิ ++B; xn--gdh2512e.xn--i4c; [V5 V6]; [V5 V6] # 훉≮.ิ ++B; \u2DF7򞣉🃘.𴈇𝟸\u0659𞤯; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 ++B; \u2DF7򞣉🃘.𴈇2\u0659𞤯; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 ++B; \u2DF7򞣉🃘.𴈇2\u0659𞤍; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 ++B; xn--trj8045le6s9b.xn--2-upc23918acjsj; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ⷷ🃘.2ٙ𞤯 ++B; \u2DF7򞣉🃘.𴈇𝟸\u0659𞤍; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ⷷ🃘.2ٙ𞤯 ++T; 󗇩ßᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ßᢞ.٠نخ- ++N; 󗇩ßᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ßᢞ.٠نخ- ++T; 󗇩ßᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ßᢞ.٠نخ- ++N; 󗇩ßᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ßᢞ.٠نخ- ++T; 󗇩SSᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- ++N; 󗇩SSᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 
B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- ++T; 󗇩ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- ++N; 󗇩ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- ++T; 󗇩Ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- ++N; 󗇩Ssᢞ\u200C。\u0660𞷻\u0646\u062E-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- ++B; xn--ss-jepz4596r.xn----dnc5e1er384z; [B1 V3 V6]; [B1 V3 V6] # ssᢞ.٠نخ- ++B; xn--ss-jep006bqt765b.xn----dnc5e1er384z; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # ssᢞ.٠نخ- ++B; xn--zca272jbif10059a.xn----dnc5e1er384z; [B1 B6 C1 V3 V6]; [B1 B6 C1 V3 V6] # ßᢞ.٠نخ- ++T; 󗇩SSᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- ++N; 󗇩SSᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- ++T; 󗇩ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- ++N; 󗇩ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- ++T; 󗇩Ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 P1 V3 V6] # ssᢞ.٠نخ- ++N; 󗇩Ssᢞ\u200C。\u0660𞷻\uFCD4-; [B1 B6 C1 P1 V3 V6]; [B1 B6 C1 P1 V3 V6] # ssᢞ.٠نخ- ++B; ꡆ。Ↄ\u0FB5놮-; [P1 V3 V6]; [P1 V3 V6] # ꡆ.Ↄྵ놮- ++B; ꡆ。Ↄ\u0FB5놮-; [P1 V3 V6]; [P1 V3 V6] # ꡆ.Ↄྵ놮- ++B; ꡆ。ↄ\u0FB5놮-; [V3]; [V3] # ꡆ.ↄྵ놮- ++B; ꡆ。ↄ\u0FB5놮-; [V3]; [V3] # ꡆ.ↄྵ놮- ++B; xn--fc9a.xn----qmg097k469k; [V3]; [V3] # ꡆ.ↄྵ놮- ++B; xn--fc9a.xn----qmg787k869k; [V3 V6]; [V3 V6] # ꡆ.Ↄྵ놮- ++T; \uFDAD\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # لمي.ک ++N; \uFDAD\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B3 B5 B6 C2 P1 V6] # لمي.ک ++T; \u0644\u0645\u064A\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # لمي.ک ++N; \u0644\u0645\u064A\u200D.񥰌\u06A9; [B3 B5 B6 C2 P1 V6]; [B3 B5 B6 C2 P1 V6] # لمي.ک ++B; xn--ghbcp.xn--ckb36214f; [B5 B6 V6]; [B5 B6 V6] # لمي.ک ++B; xn--ghbcp494x.xn--ckb36214f; [B3 B5 B6 C2 V6]; [B3 B5 B6 C2 V6] # لمي.ک ++B; Ⴜ\u1C2F𐳒≯。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ ++B; 
Ⴜ\u1C2F𐳒>\u0338。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ ++B; ⴜ\u1C2F𐳒>\u0338。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ⴜᰯ𐳒≯.۠ᜲྺ ++B; ⴜ\u1C2F𐳒≯。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ⴜᰯ𐳒≯.۠ᜲྺ ++B; Ⴜ\u1C2F𐲒≯。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ ++B; Ⴜ\u1C2F𐲒>\u0338。\u06E0\u1732\u0FBA; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ ++B; xn--0nd679cf3eq67y.xn--wlb646b4ng; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # Ⴜᰯ𐳒≯.۠ᜲྺ ++B; xn--r1f68xh1jgv7u.xn--wlb646b4ng; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ⴜᰯ𐳒≯.۠ᜲྺ ++B; 𐋵。\uFCEC; [B1]; [B1] # 𐋵.كم ++B; 𐋵。\u0643\u0645; [B1]; [B1] # 𐋵.كم ++B; xn--p97c.xn--fhbe; [B1]; [B1] # 𐋵.كم ++B; 𐋵.\u0643\u0645; [B1]; [B1] # 𐋵.كم ++B; ≮𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ ++B; <\u0338𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ ++B; ≮𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ ++B; <\u0338𝅶.񱲁\uAAEC⹈󰥭; [P1 V6]; [P1 V6] # ≮.ꫬ⹈ ++B; xn--gdh0880o.xn--4tjx101bsg00ds9pyc; [V6]; [V6] # ≮.ꫬ⹈ ++B; \u2DF0\u0358ᢕ.\u0361𐹷󠴍; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⷰ͘ᢕ.͡𐹷 ++B; \u2DF0\u0358ᢕ.\u0361𐹷󠴍; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⷰ͘ᢕ.͡𐹷 ++B; xn--2ua889htsp.xn--cva2687k2tv0g; [B1 V5 V6]; [B1 V5 V6] # ⷰ͘ᢕ.͡𐹷 ++T; \uFD79ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ ++N; \uFD79ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ ++T; \u063A\u0645\u0645ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ ++N; \u063A\u0645\u0645ᡐ\u200C\u06AD.𑋪\u05C7; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ ++B; xn--5gbwa03bg24e.xn--vdb1198k; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ ++B; xn--5gbwa03bg24eptk.xn--vdb1198k; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] # غممᡐڭ.𑋪ׇ ++T; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [P1 V5 V6] # 𑑂.🞕 ++N; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 𑑂.🞕 ++T; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [P1 V5 V6] # 𑑂.🞕 ++N; 𑑂。\u200D󥞀🞕򥁔; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 𑑂.🞕 ++B; 
xn--8v1d.xn--ye9h41035a2qqs; [V5 V6]; [V5 V6] ++B; xn--8v1d.xn--1ug1386plvx1cd8vya; [C2 V5 V6]; [C2 V5 V6] # 𑑂.🞕 ++B; -\u05E9。⒚; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ש.⒚ ++B; -\u05E9。19.; [B1 V3]; [B1 V3] # -ש.19. ++B; xn----gjc.19.; [B1 V3]; [B1 V3] # -ש.19. ++B; xn----gjc.xn--cth; [B1 V3 V6]; [B1 V3 V6] # -ש.⒚ ++T; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ࡅ.ᢎ ++N; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ࡅ.ᢎ ++T; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # ࡅ.ᢎ ++N; 􊾻\u0845\u200C。ᢎ\u200D; [B5 B6 C1 C2 P1 V6]; [B5 B6 C1 C2 P1 V6] # ࡅ.ᢎ ++B; xn--3vb50049s.xn--79e; [B5 B6 V6]; [B5 B6 V6] # ࡅ.ᢎ ++B; xn--3vb882jz4411a.xn--79e259a; [B5 B6 C1 C2 V6]; [B5 B6 C1 C2 V6] # ࡅ.ᢎ ++T; ß\u09C1\u1DED。\u06208₅; ß\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ßুᷭ.ؠ85 ++N; ß\u09C1\u1DED。\u06208₅; ß\u09C1\u1DED.\u062085; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 ++T; ß\u09C1\u1DED。\u062085; ß\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ßুᷭ.ؠ85 ++N; ß\u09C1\u1DED。\u062085; ß\u09C1\u1DED.\u062085; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 ++B; SS\u09C1\u1DED。\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 ++B; ss\u09C1\u1DED。\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 ++B; Ss\u09C1\u1DED。\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 ++B; xn--ss-e2f077r.xn--85-psd; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 ++B; ss\u09C1\u1DED.\u062085; ; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 ++B; SS\u09C1\u1DED.\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 ++B; Ss\u09C1\u1DED.\u062085; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 ++B; xn--zca266bwrr.xn--85-psd; ß\u09C1\u1DED.\u062085; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 ++T; ß\u09C1\u1DED.\u062085; ; xn--ss-e2f077r.xn--85-psd # ßুᷭ.ؠ85 ++N; ß\u09C1\u1DED.\u062085; ; xn--zca266bwrr.xn--85-psd # ßুᷭ.ؠ85 ++B; SS\u09C1\u1DED。\u06208₅; ss\u09C1\u1DED.\u062085; 
xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 ++B; ss\u09C1\u1DED。\u06208₅; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 ++B; Ss\u09C1\u1DED。\u06208₅; ss\u09C1\u1DED.\u062085; xn--ss-e2f077r.xn--85-psd # ssুᷭ.ؠ85 ++T; \u0ACD\u0484魅𝟣.₃𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß ++N; \u0ACD\u0484魅𝟣.₃𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß ++T; \u0ACD\u0484魅1.3𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß ++N; \u0ACD\u0484魅1.3𐹥ß; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß ++B; \u0ACD\u0484魅1.3𐹥SS; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss ++B; \u0ACD\u0484魅1.3𐹥ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss ++B; \u0ACD\u0484魅1.3𐹥Ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss ++B; xn--1-0xb049b102o.xn--3ss-nv9t; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss ++B; xn--1-0xb049b102o.xn--3-qfa7018r; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ß ++B; \u0ACD\u0484魅𝟣.₃𐹥SS; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss ++B; \u0ACD\u0484魅𝟣.₃𐹥ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss ++B; \u0ACD\u0484魅𝟣.₃𐹥Ss; [B1 V5]; [B1 V5] # ્҄魅1.3𐹥ss ++B; \u072B。𑓂⒈𑜫󠿻; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ܫ.𑓂⒈𑜫 ++B; \u072B。𑓂1.𑜫󠿻; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ܫ.𑓂1.𑜫 ++B; xn--1nb.xn--1-jq9i.xn--ji2dg9877c; [B1 V5 V6]; [B1 V5 V6] # ܫ.𑓂1.𑜫 ++B; xn--1nb.xn--tsh7798f6rbrt828c; [B1 V5 V6]; [B1 V5 V6] # ܫ.𑓂⒈𑜫 ++B; \uFE0Dછ。嵨; છ.嵨; xn--6dc.xn--tot ++B; xn--6dc.xn--tot; છ.嵨; xn--6dc.xn--tot ++B; છ.嵨; ; xn--6dc.xn--tot ++B; Ⴔ≠Ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] ++B; Ⴔ=\u0338Ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] ++B; ⴔ=\u0338ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] ++B; ⴔ≠ⴀ.𐹥𐹰; [B1 P1 V6]; [B1 P1 V6] ++B; xn--1ch603bxb.xn--do0dwa; [B1 V6]; [B1 V6] ++B; xn--7md3b171g.xn--do0dwa; [B1 V6]; [B1 V6] ++T; -\u200C⒙𐫥。𝨵; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # -⒙𐫥.𝨵 ++N; -\u200C⒙𐫥。𝨵; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -⒙𐫥.𝨵 ++T; -\u200C18.𐫥。𝨵; [C1 V3 V5]; [V3 V5] # -18.𐫥.𝨵 ++N; -\u200C18.𐫥。𝨵; [C1 V3 V5]; [C1 V3 V5] # -18.𐫥.𝨵 ++B; -18.xn--rx9c.xn--382h; [V3 V5]; [V3 V5] ++B; xn---18-9m0a.xn--rx9c.xn--382h; [C1 V3 V5]; [C1 V3 V5] # -18.𐫥.𝨵 ++B; xn----ddps939g.xn--382h; [V3 V5 V6]; [V3 V5 V6] ++B; xn----sgn18r3191a.xn--382h; [C1 V3 V5 V6]; [C1 V3 V5 V6] # -⒙𐫥.𝨵 ++B; ︒.ʌᠣ-𐹽; [B1 B5 B6 P1 V6]; 
[B1 B5 B6 P1 V6] ++B; 。.ʌᠣ-𐹽; [B5 B6 A4_2]; [B5 B6 A4_2] ++B; 。.Ʌᠣ-𐹽; [B5 B6 A4_2]; [B5 B6 A4_2] ++B; ..xn----73a596nuh9t; [B5 B6 A4_2]; [B5 B6 A4_2] ++B; ︒.Ʌᠣ-𐹽; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] ++B; xn--y86c.xn----73a596nuh9t; [B1 B5 B6 V6]; [B1 B5 B6 V6] ++B; \uFE05︒。𦀾\u1CE0; [P1 V6]; [P1 V6] # ︒.𦀾᳠ ++B; \uFE05。。𦀾\u1CE0; [A4_2]; [A4_2] # ..𦀾᳠ ++B; ..xn--t6f5138v; [A4_2]; [A4_2] # ..𦀾᳠ ++B; xn--y86c.xn--t6f5138v; [V6]; [V6] # ︒.𦀾᳠ ++B; xn--t6f5138v; 𦀾\u1CE0; xn--t6f5138v # 𦀾᳠ ++B; 𦀾\u1CE0; ; xn--t6f5138v # 𦀾᳠ ++T; 𞮑ß􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++N; 𞮑ß􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 𞮑SS􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 𞮑ss􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 𞮑Ss􏞞。ᡁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; xn--ss-o412ac6305g.xn--07e; [B2 B3 V6]; [B2 B3 V6] ++B; xn--zca9432wb989f.xn--07e; [B2 B3 V6]; [B2 B3 V6] ++T; \uA953\u200D\u062C\u066C。𱆎󻡟\u200C󠅆; [B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # ꥓ج٬. ++N; \uA953\u200D\u062C\u066C。𱆎󻡟\u200C󠅆; [B5 B6 C1 P1 V5 V6]; [B5 B6 C1 P1 V5 V6] # ꥓ج٬. ++B; xn--rgb2k6711c.xn--ec8nj3948b; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ꥓ج٬. ++B; xn--rgb2k500fhq9j.xn--0ug78870a5sp9d; [B5 B6 C1 V5 V6]; [B5 B6 C1 V5 V6] # ꥓ج٬. 
++T; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ ++N; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ ++T; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ ++N; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ ++T; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ ++N; 󠕏.-ß\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ ++T; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ß≠ ++N; 󠕏.-ß\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ß≠ ++T; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ ++T; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ ++T; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ ++T; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ ++T; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ ++T; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ ++B; xn--u836e.xn---ss-gl2a; [V3 V6]; [V3 V6] ++B; xn--u836e.xn---ss-cn0at5l; [C1 V3 V6]; [C1 V3 V6] # .-ss≠ ++B; xn--u836e.xn----qfa750ve7b; [C1 V3 V6]; [C1 V3 V6] # .-ß≠ ++T; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-SS\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ ++T; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-SS\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ ++T; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ ++T; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ ++T; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-Ss\u200C=\u0338; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ ++T; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [P1 V3 V6] # .-ss≠ ++N; 󠕏.-Ss\u200C≠; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .-ss≠ 
++T; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ ++N; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ ++T; ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ ++N; ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ ++T; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ ++N; ᡙ\u200C。≯𐋲≠; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ ++T; ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [P1 V6] # ᡙ.≯𐋲≠ ++N; ᡙ\u200C。>\u0338𐋲=\u0338; [C1 P1 V6]; [C1 P1 V6] # ᡙ.≯𐋲≠ ++B; xn--p8e.xn--1ch3a7084l; [V6]; [V6] ++B; xn--p8e650b.xn--1ch3a7084l; [C1 V6]; [C1 V6] # ᡙ.≯𐋲≠ ++B; 𐹧𞲄󠁭񆼩。\u034E🄀; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹧.͎🄀 ++B; 𐹧𞲄󠁭񆼩。\u034E0.; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹧.͎0. ++B; xn--fo0dw409aq58qrn69d.xn--0-bgb.; [B1 V5 V6]; [B1 V5 V6] # 𐹧.͎0. ++B; xn--fo0dw409aq58qrn69d.xn--sua6883w; [B1 V5 V6]; [B1 V5 V6] # 𐹧.͎🄀 ++T; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡς ++N; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡς ++T; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡς ++N; Ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡς ++T; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡς ++N; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡς ++T; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡσ ++N; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡσ ++T; ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡσ ++N; ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡσ ++B; xn--vkj.xn--4xa73ob5892c; [B2 B3 V6]; [B2 B3 V6] # ⴄ.ܡσ ++B; xn--vkj.xn--4xa73o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # ⴄ.ܡσ ++B; xn--cnd.xn--4xa73ob5892c; [B2 B3 V6]; [B2 B3 V6] # Ⴄ.ܡσ ++B; xn--cnd.xn--4xa73o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # Ⴄ.ܡσ ++B; xn--vkj.xn--3xa93o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # ⴄ.ܡς ++B; xn--cnd.xn--3xa93o3t5ajq467a; [B1 C2 V6]; [B1 C2 V6] # Ⴄ.ܡς ++T; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡς ++N; ⴄ.\u200D\u0721󻣋ς; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡς ++T; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # Ⴄ.ܡσ ++N; Ⴄ.\u200D\u0721󻣋Σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # Ⴄ.ܡσ ++T; 
ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B2 B3 P1 V6] # ⴄ.ܡσ ++N; ⴄ.\u200D\u0721󻣋σ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⴄ.ܡσ ++B; 򮵛\u0613.Ⴕ; [P1 V6]; [P1 V6] # ؓ.Ⴕ ++B; 򮵛\u0613.ⴕ; [P1 V6]; [P1 V6] # ؓ.ⴕ ++B; xn--1fb94204l.xn--dlj; [V6]; [V6] # ؓ.ⴕ ++B; xn--1fb94204l.xn--tnd; [V6]; [V6] # ؓ.Ⴕ ++T; ≯\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ ++N; ≯\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ ++T; >\u0338\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ ++N; >\u0338\u1DF3𞤥。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ ++T; >\u0338\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ ++N; >\u0338\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ ++T; ≯\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 P1 V5 V6] # ≯ᷳ𞤥.꣄ ++N; ≯\u1DF3𞤃。\u200C\uA8C4󠪉\u200D; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # ≯ᷳ𞤥.꣄ ++B; xn--ofg13qyr21c.xn--0f9au6706d; [B1 V5 V6]; [B1 V5 V6] # ≯ᷳ𞤥.꣄ ++B; xn--ofg13qyr21c.xn--0ugc0116hix29k; [B1 C1 C2 V6]; [B1 C1 C2 V6] # ≯ᷳ𞤥.꣄ ++T; \u200C󠄷。򒑁; [C1 P1 V6]; [P1 V6 A4_2] # . ++N; \u200C󠄷。򒑁; [C1 P1 V6]; [C1 P1 V6] # . ++T; \u200C󠄷。򒑁; [C1 P1 V6]; [P1 V6 A4_2] # . ++N; \u200C󠄷。򒑁; [C1 P1 V6]; [C1 P1 V6] # . ++B; .xn--w720c; [V6 A4_2]; [V6 A4_2] ++B; xn--0ug.xn--w720c; [C1 V6]; [C1 V6] # . 
++T; ⒈\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V6]; [P1 V6] # ⒈ූ焅.ꡟ ++N; ⒈\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V6]; [C2 P1 V6] # ⒈ූ焅.ꡟ ++T; 1.\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V5 V6]; [P1 V5 V6] # 1.ූ焅.ꡟ ++N; 1.\u0DD6焅.󗡙\u200Dꡟ; [C2 P1 V5 V6]; [C2 P1 V5 V6] # 1.ූ焅.ꡟ ++B; 1.xn--t1c6981c.xn--4c9a21133d; [V5 V6]; [V5 V6] # 1.ූ焅.ꡟ ++B; 1.xn--t1c6981c.xn--1ugz184c9lw7i; [C2 V5 V6]; [C2 V5 V6] # 1.ූ焅.ꡟ ++B; xn--t1c337io97c.xn--4c9a21133d; [V6]; [V6] # ⒈ූ焅.ꡟ ++B; xn--t1c337io97c.xn--1ugz184c9lw7i; [C2 V6]; [C2 V6] # ⒈ූ焅.ꡟ ++T; \u1DCDς≮.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 ++N; \u1DCDς≮.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 ++T; \u1DCDς<\u0338.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 ++N; \u1DCDς<\u0338.ς𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 ++T; \u1DCDς<\u0338.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 ++N; \u1DCDς<\u0338.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 ++T; \u1DCDς≮.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 ++N; \u1DCDς≮.ς𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍ς≮.ς𝪦𞤷0 ++B; \u1DCDΣ≮.Σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 ++B; \u1DCDΣ<\u0338.Σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 ++B; \u1DCDσ<\u0338.σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 ++B; \u1DCDσ≮.σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 ++B; \u1DCDΣ≮.Σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 ++B; \u1DCDΣ<\u0338.Σ𝪦𞤷0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 ++B; xn--4xa544kvid.xn--0-zmb55727aggma; [B1 B5 V5 V6]; [B1 B5 V5 V6] # ᷍σ≮.σ𝪦𞤷0 ++B; xn--3xa744kvid.xn--0-xmb85727aggma; [B1 B5 V5 V6]; [B1 B5 V5 V6] # ᷍ς≮.ς𝪦𞤷0 ++B; \u1DCDσ≮.σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 ++B; \u1DCDσ<\u0338.σ𝪦𞤕0; [B1 B5 P1 V5 V6]; [B1 B5 P1 V5 V6] # ᷍σ≮.σ𝪦𞤷0 ++T; 򢦾ß\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ßֹ𐫙.֭ࢡ ++N; 򢦾ß\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ßֹ𐫙.֭ࢡ ++B; 򢦾SS\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ssֹ𐫙.֭ࢡ ++B; 
򢦾ss\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ssֹ𐫙.֭ࢡ ++B; 򢦾Ss\u05B9𐫙.\u05AD\u08A1; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ssֹ𐫙.֭ࢡ ++B; xn--ss-xjd6058xlz50g.xn--4cb62m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ssֹ𐫙.֭ࢡ ++B; xn--zca89v339zj118e.xn--4cb62m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ßֹ𐫙.֭ࢡ ++B; -𞣄。⒈; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; -𞣄。1.; [B1 V3]; [B1 V3] ++B; xn----xc8r.1.; [B1 V3]; [B1 V3] ++B; xn----xc8r.xn--tsh; [B1 V3 V6]; [B1 V3 V6] ++B; 񈠢𐫖𝟡。\u063E𑘿; [B5 P1 V6]; [B5 P1 V6] # 𐫖9.ؾ𑘿 ++B; 񈠢𐫖9。\u063E𑘿; [B5 P1 V6]; [B5 P1 V6] # 𐫖9.ؾ𑘿 ++B; xn--9-el5iv442t.xn--9gb0830l; [B5 V6]; [B5 V6] # 𐫖9.ؾ𑘿 ++T; \u0668\uFC8C\u0668\u1A5D.\u200D; [B1 C2]; [B1] # ٨نم٨ᩝ. ++N; \u0668\uFC8C\u0668\u1A5D.\u200D; [B1 C2]; [B1 C2] # ٨نم٨ᩝ. ++T; \u0668\u0646\u0645\u0668\u1A5D.\u200D; [B1 C2]; [B1] # ٨نم٨ᩝ. ++N; \u0668\u0646\u0645\u0668\u1A5D.\u200D; [B1 C2]; [B1 C2] # ٨نم٨ᩝ. ++B; xn--hhbb5hc956w.; [B1]; [B1] # ٨نم٨ᩝ. ++B; xn--hhbb5hc956w.xn--1ug; [B1 C2]; [B1 C2] # ٨نم٨ᩝ. ++B; 𝟘.Ⴇ󀳑\uFD50񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.Ⴇتجم ++B; 0.Ⴇ󀳑\u062A\u062C\u0645񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.Ⴇتجم ++B; 0.ⴇ󀳑\u062A\u062C\u0645񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.ⴇتجم ++B; 0.xn--pgbe9ez79qd207lvff8b; [B1 B5 V6]; [B1 B5 V6] # 0.ⴇتجم ++B; 0.xn--pgbe9e344c2725svff8b; [B1 B5 V6]; [B1 B5 V6] # 0.Ⴇتجم ++B; 𝟘.ⴇ󀳑\uFD50񫃱; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 0.ⴇتجم ++B; 𑇀▍.⁞ᠰ; [V5]; [V5] ++B; xn--9zh3057f.xn--j7e103b; [V5]; [V5] ++T; \u200D-\u067A.򏯩; [B1 C2 P1 V6]; [B1 P1 V3 V6] # -ٺ. ++N; \u200D-\u067A.򏯩; [B1 C2 P1 V6]; [B1 C2 P1 V6] # -ٺ. ++B; xn----qrc.xn--ts49b; [B1 V3 V6]; [B1 V3 V6] # -ٺ. ++B; xn----qrc357q.xn--ts49b; [B1 C2 V6]; [B1 C2 V6] # -ٺ. 
++T; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ ++N; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ ++T; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ ++N; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ ++T; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ ++N; ᠢ𐮂𐫘寐。\u200C≯✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ ++T; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6] # ᠢ𐮂𐫘寐.≯✳ ++N; ᠢ𐮂𐫘寐。\u200C>\u0338✳; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ᠢ𐮂𐫘寐.≯✳ ++B; xn--46e6675axzzhota.xn--hdh99p; [B1 B5 V6]; [B1 B5 V6] ++B; xn--46e6675axzzhota.xn--0ug06gu8f; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ᠢ𐮂𐫘寐.≯✳ ++T; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ႺႴ ++N; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ႺႴ ++T; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ႺႴ ++N; \u200D。󸲜ႺႴ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ႺႴ ++T; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴚⴔ ++N; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ⴚⴔ ++T; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴚⴔ ++N; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .Ⴚⴔ ++B; .xn--ynd036lq981an3r4h; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] ++B; xn--1ug.xn--ynd036lq981an3r4h; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # .Ⴚⴔ ++B; .xn--cljl81825an3r4h; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] ++B; xn--1ug.xn--cljl81825an3r4h; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # .ⴚⴔ ++B; .xn--sndl01647an3h1h; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] ++B; xn--1ug.xn--sndl01647an3h1h; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # .ႺႴ ++T; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴚⴔ ++N; \u200D。󸲜ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .ⴚⴔ ++T; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴚⴔ ++N; \u200D。󸲜Ⴚⴔ𞨇; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # .Ⴚⴔ ++T; -3.\u200Dヌᢕ; [C2 V3]; [V3] # -3.ヌᢕ ++N; -3.\u200Dヌᢕ; [C2 V3]; [C2 V3] # -3.ヌᢕ ++B; -3.xn--fbf115j; [V3]; [V3] ++B; -3.xn--fbf739aq5o; [C2 
V3]; [C2 V3] # -3.ヌᢕ ++T; 🂃\u0666ß\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ß.- ++N; 🂃\u0666ß\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ß.- ++T; 🂃\u0666SS\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ss.- ++N; 🂃\u0666SS\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ss.- ++T; 🂃\u0666ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ss.- ++N; 🂃\u0666ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ss.- ++T; 🂃\u0666Ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 P1 V3 V6] # 🂃٦ss.- ++N; 🂃\u0666Ss\u200D。󠠂򭰍𞩒-; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # 🂃٦ss.- ++B; xn--ss-pyd98921c.xn----nz8rh7531csznt; [B1 V3 V6]; [B1 V3 V6] # 🂃٦ss.- ++B; xn--ss-pyd483x5k99b.xn----nz8rh7531csznt; [B1 C2 V3 V6]; [B1 C2 V3 V6] # 🂃٦ss.- ++B; xn--zca34z68yzu83b.xn----nz8rh7531csznt; [B1 C2 V3 V6]; [B1 C2 V3 V6] # 🂃٦ß.- ++T; ꇟ-𐾺\u069F。򰀺\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꇟ-ڟ. ++N; ꇟ-𐾺\u069F。򰀺\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꇟ-ڟ. ++B; xn----utc4430jd3zd.xn--bp20d; [B5 B6 V6]; [B5 B6 V6] # ꇟ-ڟ. ++B; xn----utc4430jd3zd.xn--0ugx6670i; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ꇟ-ڟ. 
++B; \u0665.\u0484𐨗𝩋𴤃; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ٥.҄𐨗𝩋 ++B; xn--eib.xn--n3a0405kus8eft5l; [B1 V5 V6]; [B1 V5 V6] # ٥.҄𐨗𝩋 ++B; -.񱼓\u0649𐨿; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # -.ى𐨿 ++B; -.xn--lhb4124khbq4b; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # -.ى𐨿 ++T; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++N; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++T; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++N; 󾬨ς.𞶙녫ß; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 󾬨Σ.𞶙녫SS; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 󾬨Σ.𞶙녫SS; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 󾬨σ.𞶙녫ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 󾬨σ.𞶙녫ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 󾬨Σ.𞶙녫Ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 󾬨Σ.𞶙녫Ss; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; xn--4xa76659r.xn--ss-d64i8755h; [B2 B3 V6]; [B2 B3 V6] ++B; xn--3xa96659r.xn--zca5051g4h4i; [B2 B3 V6]; [B2 B3 V6] ++T; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ ++N; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ ++T; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ ++N; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ ++T; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ ++N; Ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ ++T; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # Ⅎ្.≠ ++N; Ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⅎ្.≠ ++T; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ ++N; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⅎ្.≠ ++T; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ ++N; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⅎ្.≠ ++B; xn--u4e969b.xn--1ch; [V6]; [V6] # ⅎ្.≠ ++B; xn--u4e823bq1a.xn--0ugb89o; [C1 C2 V6]; [C1 C2 V6] # ⅎ្.≠ ++B; xn--u4e319b.xn--1ch; [V6]; [V6] # Ⅎ្.≠ ++B; xn--u4e823bcza.xn--0ugb89o; [C1 C2 V6]; [C1 C2 V6] # Ⅎ្.≠ ++T; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ ++N; ⅎ\u17D2\u200D。=\u0338\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # 
ⅎ្.≠ ++T; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [P1 V6] # ⅎ្.≠ ++N; ⅎ\u17D2\u200D。≠\u200D\u200C; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⅎ្.≠ ++T; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 P1 V5 V6] # 𐋺꫶꥓.᜔ڏ ++N; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐋺꫶꥓.᜔ڏ ++T; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 P1 V5 V6] # 𐋺꫶꥓.᜔ڏ ++N; 𐋺\uAAF6\uA953󧦉.\u200C\u1714\u068F; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐋺꫶꥓.᜔ڏ ++B; xn--3j9a14ak27osbz2o.xn--ljb175f; [B1 V5 V6]; [B1 V5 V6] # 𐋺꫶꥓.᜔ڏ ++B; xn--3j9a14ak27osbz2o.xn--ljb175f1wg; [B1 C1 V6]; [B1 C1 V6] # 𐋺꫶꥓.᜔ڏ ++B; 񺔯\u0FA8.≯; [P1 V6]; [P1 V6] # ྨ.≯ ++B; 񺔯\u0FA8.>\u0338; [P1 V6]; [P1 V6] # ྨ.≯ ++B; 񺔯\u0FA8.≯; [P1 V6]; [P1 V6] # ྨ.≯ ++B; 񺔯\u0FA8.>\u0338; [P1 V6]; [P1 V6] # ྨ.≯ ++B; xn--4fd57150h.xn--hdh; [V6]; [V6] # ྨ.≯ ++T; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # 𞡄Ⴓ.𐇽 ++N; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𞡄Ⴓ.𐇽 ++T; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # 𞡄Ⴓ.𐇽 ++N; \u200D𞡄Ⴓ.𐇽; [B1 B3 B6 C2 P1 V5 V6]; [B1 B3 B6 C2 P1 V5 V6] # 𞡄Ⴓ.𐇽 ++T; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B2 B3 B6 V5] # 𞡄ⴓ.𐇽 ++N; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B3 B6 C2 V5] # 𞡄ⴓ.𐇽 ++B; xn--blj7492l.xn--m27c; [B1 B2 B3 B6 V5]; [B1 B2 B3 B6 V5] ++B; xn--1ugz52c4i16a.xn--m27c; [B1 B3 B6 C2 V5]; [B1 B3 B6 C2 V5] # 𞡄ⴓ.𐇽 ++B; xn--rnd5552v.xn--m27c; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] ++B; xn--rnd379ex885a.xn--m27c; [B1 B3 B6 C2 V5 V6]; [B1 B3 B6 C2 V5 V6] # 𞡄Ⴓ.𐇽 ++T; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B2 B3 B6 V5] # 𞡄ⴓ.𐇽 ++N; \u200D𞡄ⴓ.𐇽; [B1 B3 B6 C2 V5]; [B1 B3 B6 C2 V5] # 𞡄ⴓ.𐇽 ++T; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ ++N; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ ++T; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ ++N; 𐪒ß\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ ++B; 𐪒SS\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ ++B; 𐪒ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ ++B; 𐪒Ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ ++B; xn--ss-tu9hw933a.xn--08e; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ ++B; 
xn--zca2517f2hvc.xn--08e; [B2 B3]; [B2 B3] # 𐪒ß꣪.ᡤ ++B; 𐪒SS\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ ++B; 𐪒ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ ++B; 𐪒Ss\uA8EA.ᡤ; [B2 B3]; [B2 B3] # 𐪒ss꣪.ᡤ ++T; 𐨿󠆌鸮𑚶.ς; [V5]; [V5] ++N; 𐨿󠆌鸮𑚶.ς; [V5]; [V5] ++B; 𐨿󠆌鸮𑚶.Σ; [V5]; [V5] ++B; 𐨿󠆌鸮𑚶.σ; [V5]; [V5] ++B; xn--l76a726rt2h.xn--4xa; [V5]; [V5] ++B; xn--l76a726rt2h.xn--3xa; [V5]; [V5] ++B; ⒗𞤬。-𑚶; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; 16.𞤬。-𑚶; [B1 V3]; [B1 V3] ++B; 16.𞤊。-𑚶; [B1 V3]; [B1 V3] ++B; 16.xn--ke6h.xn----4j0j; [B1 V3]; [B1 V3] ++B; ⒗𞤊。-𑚶; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; xn--8shw466n.xn----4j0j; [B1 V3 V6]; [B1 V3 V6] ++B; \u08B3𞤿⾫。𐹣\u068F⒈; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # ࢳ𞤿隹.𐹣ڏ⒈ ++B; \u08B3𞤿隹。𐹣\u068F1.; [B1 B2 B3]; [B1 B2 B3] # ࢳ𞤿隹.𐹣ڏ1. ++B; \u08B3𞤝隹。𐹣\u068F1.; [B1 B2 B3]; [B1 B2 B3] # ࢳ𞤿隹.𐹣ڏ1. ++B; xn--8yb0383efiwk.xn--1-wsc3373r.; [B1 B2 B3]; [B1 B2 B3] # ࢳ𞤿隹.𐹣ڏ1. ++B; \u08B3𞤝⾫。𐹣\u068F⒈; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # ࢳ𞤿隹.𐹣ڏ⒈ ++B; xn--8yb0383efiwk.xn--ljb064mol4n; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ࢳ𞤿隹.𐹣ڏ⒈ ++B; \u2433𚎛𝟧\u0661.ᡢ8\u0F72\u0600; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 5١.ᡢ8ི ++B; \u2433𚎛5\u0661.ᡢ8\u0F72\u0600; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 5١.ᡢ8ི ++B; xn--5-bqc410un435a.xn--8-rkc763epjj; [B5 B6 V6]; [B5 B6 V6] # 5١.ᡢ8ི ++B; 𐹠.🄀⒒-󨰈; [B1 P1 V6]; [B1 P1 V6] ++B; 𐹠.0.11.-󨰈; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; xn--7n0d.0.11.xn----8j07m; [B1 V3 V6]; [B1 V3 V6] ++B; xn--7n0d.xn----xcp9757q1s13g; [B1 V6]; [B1 V6] ++T; ς-。\u200C𝟭-; [C1 V3]; [V3] # ς-.1- ++N; ς-。\u200C𝟭-; [C1 V3]; [C1 V3] # ς-.1- ++T; ς-。\u200C1-; [C1 V3]; [V3] # ς-.1- ++N; ς-。\u200C1-; [C1 V3]; [C1 V3] # ς-.1- ++T; Σ-。\u200C1-; [C1 V3]; [V3] # σ-.1- ++N; Σ-。\u200C1-; [C1 V3]; [C1 V3] # σ-.1- ++T; σ-。\u200C1-; [C1 V3]; [V3] # σ-.1- ++N; σ-。\u200C1-; [C1 V3]; [C1 V3] # σ-.1- ++B; xn----zmb.1-; [V3]; [V3] ++B; xn----zmb.xn--1--i1t; [C1 V3]; [C1 V3] # σ-.1- ++B; xn----xmb.xn--1--i1t; [C1 V3]; [C1 V3] # ς-.1- ++T; Σ-。\u200C𝟭-; [C1 V3]; [V3] # σ-.1- ++N; Σ-。\u200C𝟭-; [C1 V3]; [C1 V3] # σ-.1- ++T; σ-。\u200C𝟭-; [C1 V3]; 
[V3] # σ-.1- ++N; σ-。\u200C𝟭-; [C1 V3]; [C1 V3] # σ-.1- ++B; \u1734-\u0CE2.󠄩Ⴄ; [P1 V5 V6]; [P1 V5 V6] # ᜴-ೢ.Ⴄ ++B; \u1734-\u0CE2.󠄩Ⴄ; [P1 V5 V6]; [P1 V5 V6] # ᜴-ೢ.Ⴄ ++B; \u1734-\u0CE2.󠄩ⴄ; [V5]; [V5] # ᜴-ೢ.ⴄ ++B; xn----ggf830f.xn--vkj; [V5]; [V5] # ᜴-ೢ.ⴄ ++B; xn----ggf830f.xn--cnd; [V5 V6]; [V5 V6] # ᜴-ೢ.Ⴄ ++B; \u1734-\u0CE2.󠄩ⴄ; [V5]; [V5] # ᜴-ೢ.ⴄ ++B; 򭈗♋\u06BB𐦥。\u0954⒈; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ♋ڻ𐦥.॔⒈ ++B; 򭈗♋\u06BB𐦥。\u09541.; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ♋ڻ𐦥.॔1. ++B; xn--ukb372n129m3rs7f.xn--1-fyd.; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ♋ڻ𐦥.॔1. ++B; xn--ukb372n129m3rs7f.xn--u3b240l; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ♋ڻ𐦥.॔⒈ ++T; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 V5] # ֤.ہ᪳ ++N; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ֤.ہ᪳ ++T; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 V5] # ֤.ہ᪳ ++N; \u05A4.\u06C1\u1AB3\u200C; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ֤.ہ᪳ ++B; xn--vcb.xn--0kb623h; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ֤.ہ᪳ ++B; xn--vcb.xn--0kb623hm1d; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ֤.ہ᪳ ++B; 񢭏\u0846≮\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. ++B; 񢭏\u0846<\u0338\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. ++B; 񢭏\u0846≮\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. ++B; 񢭏\u0846<\u0338\u0ACD.𞦊; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡆ≮્. ++B; xn--4vb80kq29ayo62l.xn--8g6h; [B5 B6 V6]; [B5 B6 V6] # ࡆ≮્. 
++T; \u200D。𞀘⒈ꡍ擉; [C2 P1 V5 V6]; [P1 V5 V6 A4_2] # .𞀘⒈ꡍ擉 ++N; \u200D。𞀘⒈ꡍ擉; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .𞀘⒈ꡍ擉 ++T; \u200D。𞀘1.ꡍ擉; [C2 V5]; [V5 A4_2] # .𞀘1.ꡍ擉 ++N; \u200D。𞀘1.ꡍ擉; [C2 V5]; [C2 V5] # .𞀘1.ꡍ擉 ++B; .xn--1-1p4r.xn--s7uv61m; [V5 A4_2]; [V5 A4_2] ++B; xn--1ug.xn--1-1p4r.xn--s7uv61m; [C2 V5]; [C2 V5] # .𞀘1.ꡍ擉 ++B; .xn--tsh026uql4bew9p; [V5 V6 A4_2]; [V5 V6 A4_2] ++B; xn--1ug.xn--tsh026uql4bew9p; [C2 V5 V6]; [C2 V5 V6] # .𞀘⒈ꡍ擉 ++B; ₈\u07CB.\uFB64≠; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ ++B; ₈\u07CB.\uFB64=\u0338; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ ++B; 8\u07CB.\u067F≠; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ ++B; 8\u07CB.\u067F=\u0338; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 8ߋ.ٿ≠ ++B; xn--8-zbd.xn--4ib883l; [B1 B3 V6]; [B1 B3 V6] # 8ߋ.ٿ≠ ++B; ᢡ\u07DE򹐣.⒒\u0642𑍦; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ᢡߞ.⒒ق𑍦 ++B; ᢡ\u07DE򹐣.11.\u0642𑍦; [B1 B5 P1 V6]; [B1 B5 P1 V6] # ᢡߞ.11.ق𑍦 ++B; xn--5sb596fi873t.11.xn--ehb4198k; [B1 B5 V6]; [B1 B5 V6] # ᢡߞ.11.ق𑍦 ++B; xn--5sb596fi873t.xn--ehb336mvy7n; [B1 B5 V6]; [B1 B5 V6] # ᢡߞ.⒒ق𑍦 ++B; \u0E48-𐹺𝟜.\u0363\u06E1⒏; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ่-𐹺4.ͣۡ⒏ ++B; \u0E48-𐹺4.\u0363\u06E18.; [B1 V5]; [B1 V5] # ่-𐹺4.ͣۡ8. ++B; xn---4-owiz479s.xn--8-ihb69x.; [B1 V5]; [B1 V5] # ่-𐹺4.ͣۡ8. 
++B; xn---4-owiz479s.xn--eva20pjv9a; [B1 V5 V6]; [B1 V5 V6] # ่-𐹺4.ͣۡ⒏ ++B; ⫐。Ⴠ-󃐢; [P1 V6]; [P1 V6] ++B; ⫐。Ⴠ-󃐢; [P1 V6]; [P1 V6] ++B; ⫐。ⴠ-󃐢; [P1 V6]; [P1 V6] ++B; xn--r3i.xn----2wst7439i; [V6]; [V6] ++B; xn--r3i.xn----z1g58579u; [V6]; [V6] ++B; ⫐。ⴠ-󃐢; [P1 V6]; [P1 V6] ++B; 𑑂◊.⦟∠; [V5]; [V5] ++B; 𑑂◊.⦟∠; [V5]; [V5] ++B; xn--01h3338f.xn--79g270a; [V5]; [V5] ++B; 𿌰-\u0662。󋸛ꡂ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -٢.ꡂ ++B; xn----dqc20828e.xn--bc9an2879c; [B5 B6 V6]; [B5 B6 V6] # -٢.ꡂ ++B; \u0678。󠏬\u0741𞪭𐹪; [B1 P1 V6]; [B1 P1 V6] # يٴ.݁𐹪 ++B; \u064A\u0674。󠏬\u0741𞪭𐹪; [B1 P1 V6]; [B1 P1 V6] # يٴ.݁𐹪 ++B; xn--mhb8f.xn--oob2585kfdsfsbo7h; [B1 V6]; [B1 V6] # يٴ.݁𐹪 ++T; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B2 B3] # 𐫆ꌄ.ᣬ ++N; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # 𐫆ꌄ.ᣬ ++T; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B2 B3] # 𐫆ꌄ.ᣬ ++N; 𐫆ꌄ。\u200Dᣬ; [B1 B2 B3 C2]; [B1 B2 B3 C2] # 𐫆ꌄ.ᣬ ++B; xn--y77ao18q.xn--wdf; [B2 B3]; [B2 B3] ++B; xn--y77ao18q.xn--wdf367a; [B1 B2 B3 C2]; [B1 B2 B3 C2] # 𐫆ꌄ.ᣬ ++B; ₀\u0662。󅪞≯-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- ++B; ₀\u0662。󅪞>\u0338-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- ++B; 0\u0662。󅪞≯-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- ++B; 0\u0662。󅪞>\u0338-; [B1 B6 P1 V3 V6]; [B1 B6 P1 V3 V6] # 0٢.≯- ++B; xn--0-dqc.xn----ogov3342l; [B1 B6 V3 V6]; [B1 B6 V3 V6] # 0٢.≯- ++B; \u031C𐹫-𞯃.𐋤\u0845; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ̜𐹫-.𐋤ࡅ ++B; xn----gdb7046r692g.xn--3vb1349j; [B1 V5 V6]; [B1 V5 V6] # ̜𐹫-.𐋤ࡅ ++B; ≠。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ ++B; =\u0338。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ ++B; ≠。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ ++B; =\u0338。𝩑𐹩Ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ ++B; =\u0338。𝩑𐹩ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ ++B; ≠。𝩑𐹩ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ ++B; xn--1ch.xn--fcb363rk03mypug; [B1 V5 V6]; [B1 V5 V6] # ≠.𝩑𐹩ⴡ֔ ++B; xn--1ch.xn--fcb538c649rypog; [B1 V5 V6]; [B1 V5 V6] # ≠.𝩑𐹩Ⴡ֔ ++B; =\u0338。𝩑𐹩ⴡ\u0594; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ ++B; ≠。𝩑𐹩ⴡ\u0594; [B1 P1 V5 
V6]; [B1 P1 V5 V6] # ≠.𝩑𐹩ⴡ֔ ++B; 𖫳≠.Ⴀ𐮀; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] ++B; 𖫳=\u0338.Ⴀ𐮀; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] ++B; 𖫳=\u0338.ⴀ𐮀; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] ++B; 𖫳≠.ⴀ𐮀; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] ++B; xn--1ch9250k.xn--rkj6232e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] ++B; xn--1ch9250k.xn--7md2659j; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] ++B; 󠅾\u0736\u0726.ᢚ閪\u08E2𝩟; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ܶܦ.ᢚ閪𝩟 ++B; 󠅾\u0736\u0726.ᢚ閪\u08E2𝩟; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ܶܦ.ᢚ閪𝩟 ++B; xn--wnb5a.xn--l0b161fis8gbp5m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ܶܦ.ᢚ閪𝩟 ++T; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 V5] # ۋ꣩.⃝ྰ-ᛟ ++N; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 C2 V5] # ۋ꣩.⃝ྰ-ᛟ ++T; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 V5] # ۋ꣩.⃝ྰ-ᛟ ++N; \u200D󠇜\u06CB\uA8E9。\u20DD\u0FB0-ᛟ; [B1 C2 V5]; [B1 C2 V5] # ۋ꣩.⃝ྰ-ᛟ ++B; xn--blb8114f.xn----gmg236cj6k; [B1 V5]; [B1 V5] # ۋ꣩.⃝ྰ-ᛟ ++B; xn--blb540ke10h.xn----gmg236cj6k; [B1 C2 V5]; [B1 C2 V5] # ۋ꣩.⃝ྰ-ᛟ ++B; 헁󘖙\u0E3A󚍚。\u06BA𝟜; [P1 V6]; [P1 V6] # 헁ฺ.ں4 ++B; 헁󘖙\u0E3A󚍚。\u06BA𝟜; [P1 V6]; [P1 V6] # 헁ฺ.ں4 ++B; 헁󘖙\u0E3A󚍚。\u06BA4; [P1 V6]; [P1 V6] # 헁ฺ.ں4 ++B; 헁󘖙\u0E3A󚍚。\u06BA4; [P1 V6]; [P1 V6] # 헁ฺ.ں4 ++B; xn--o4c1723h8g85gt4ya.xn--4-dvc; [V6]; [V6] # 헁ฺ.ں4 ++T; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.Ⴞ ++N; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.Ⴞ ++T; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.Ⴞ ++N; 𐹭。󃱂\u200CႾ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.Ⴞ ++T; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.ⴞ ++N; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.ⴞ ++B; xn--lo0d.xn--mljx1099g; [B1 V6]; [B1 V6] ++B; xn--lo0d.xn--0ugx72cwi33v; [B1 C1 V6]; [B1 C1 V6] # 𐹭.ⴞ ++B; xn--lo0d.xn--2nd75260n; [B1 V6]; [B1 V6] ++B; xn--lo0d.xn--2nd949eqw95u; [B1 C1 V6]; [B1 C1 V6] # 𐹭.Ⴞ ++T; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹭.ⴞ ++N; 𐹭。󃱂\u200Cⴞ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹭.ⴞ ++B; \uA953.\u033D𑂽馋; [P1 V5 V6]; [P1 V5 V6] # ꥓.̽馋 ++B; 
xn--3j9a.xn--bua0708eqzrd; [V5 V6]; [V5 V6] # ꥓.̽馋 ++T; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [P1 V6] # .䜖 ++N; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [C2 P1 V6] # .䜖 ++T; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [P1 V6] # .䜖 ++N; 󈫝򪛸\u200D。䜖; [C2 P1 V6]; [C2 P1 V6] # .䜖 ++B; xn--g138cxw05a.xn--k0o; [V6]; [V6] ++B; xn--1ug30527h9mxi.xn--k0o; [C2 V6]; [C2 V6] # .䜖 ++T; ᡯ⚉姶🄉.۷\u200D🎪\u200D; [C2 P1 V6]; [P1 V6] # ᡯ⚉姶🄉.۷🎪 ++N; ᡯ⚉姶🄉.۷\u200D🎪\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡯ⚉姶🄉.۷🎪 ++T; ᡯ⚉姶8,.۷\u200D🎪\u200D; [C2 P1 V6]; [P1 V6] # ᡯ⚉姶8,.۷🎪 ++N; ᡯ⚉姶8,.۷\u200D🎪\u200D; [C2 P1 V6]; [C2 P1 V6] # ᡯ⚉姶8,.۷🎪 ++B; xn--8,-g9oy26fzu4d.xn--kmb6733w; [P1 V6]; [P1 V6] ++B; xn--8,-g9oy26fzu4d.xn--kmb859ja94998b; [C2 P1 V6]; [C2 P1 V6] # ᡯ⚉姶8,.۷🎪 ++B; xn--c9e433epi4b3j20a.xn--kmb6733w; [V6]; [V6] ++B; xn--c9e433epi4b3j20a.xn--kmb859ja94998b; [C2 V6]; [C2 V6] # ᡯ⚉姶🄉.۷🎪 ++B; 𞽀.𐹸🚖\u0E3A; [B1 P1 V6]; [B1 P1 V6] # .𐹸🚖ฺ ++B; xn--0n7h.xn--o4c9032klszf; [B1 V6]; [B1 V6] # .𐹸🚖ฺ ++B; Ⴔᠵ。𐹧\u0747۹; [B1 P1 V6]; [B1 P1 V6] # Ⴔᠵ.𐹧݇۹ ++B; Ⴔᠵ。𐹧\u0747۹; [B1 P1 V6]; [B1 P1 V6] # Ⴔᠵ.𐹧݇۹ ++B; ⴔᠵ。𐹧\u0747۹; [B1]; [B1] # ⴔᠵ.𐹧݇۹ ++B; xn--o7e997h.xn--mmb9ml895e; [B1]; [B1] # ⴔᠵ.𐹧݇۹ ++B; xn--snd659a.xn--mmb9ml895e; [B1 V6]; [B1 V6] # Ⴔᠵ.𐹧݇۹ ++B; ⴔᠵ。𐹧\u0747۹; [B1]; [B1] # ⴔᠵ.𐹧݇۹ ++T; \u135Fᡈ\u200C.︒-𖾐-; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ፟ᡈ.︒-𖾐- ++N; \u135Fᡈ\u200C.︒-𖾐-; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ፟ᡈ.︒-𖾐- ++T; \u135Fᡈ\u200C.。-𖾐-; [C1 V3 V5 A4_2]; [V3 V5 A4_2] # ፟ᡈ..-𖾐- ++N; \u135Fᡈ\u200C.。-𖾐-; [C1 V3 V5 A4_2]; [C1 V3 V5 A4_2] # ፟ᡈ..-𖾐- ++B; xn--b7d82w..xn-----pe4u; [V3 V5 A4_2]; [V3 V5 A4_2] # ፟ᡈ..-𖾐- ++B; xn--b7d82wo4h..xn-----pe4u; [C1 V3 V5 A4_2]; [C1 V3 V5 A4_2] # ፟ᡈ..-𖾐- ++B; xn--b7d82w.xn-----c82nz547a; [V3 V5 V6]; [V3 V5 V6] # ፟ᡈ.︒-𖾐- ++B; xn--b7d82wo4h.xn-----c82nz547a; [C1 V3 V5 V6]; [C1 V3 V5 V6] # ፟ᡈ.︒-𖾐- ++T; ⒈\u0601⒖\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈⒖.ᷰߛ ++N; ⒈\u0601⒖\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ⒈⒖.ᷰߛ ++T; 1.\u060115.\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6 A4_2] # 1.15..ᷰߛ ++N; 
1.\u060115.\u200C.\u1DF0\u07DB; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 1.15..ᷰߛ ++B; 1.xn--15-1pd..xn--2sb914i; [B1 V5 V6 A4_2]; [B1 V5 V6 A4_2] # 1.15..ᷰߛ ++B; 1.xn--15-1pd.xn--0ug.xn--2sb914i; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 1.15..ᷰߛ ++B; xn--jfb347mib.xn--2sb914i; [B1 V5 V6]; [B1 V5 V6] # ⒈⒖.ᷰߛ ++B; xn--jfb844kmfdwb.xn--2sb914i; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ⒈⒖.ᷰߛ ++B; 𝩜。-\u0B4DႫ; [P1 V3 V5 V6]; [P1 V3 V5 V6] # 𝩜.-୍Ⴋ ++B; 𝩜。-\u0B4Dⴋ; [V3 V5]; [V3 V5] # 𝩜.-୍ⴋ ++B; xn--792h.xn----bse820x; [V3 V5]; [V3 V5] # 𝩜.-୍ⴋ ++B; xn--792h.xn----bse632b; [V3 V5 V6]; [V3 V5 V6] # 𝩜.-୍Ⴋ ++T; ßჀ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ßჀ.ؠ刯Ⴝ ++N; ßჀ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ßჀ.ؠ刯Ⴝ ++T; ßⴠ.\u0620刯ⴝ; [B2 B3]; [B2 B3] # ßⴠ.ؠ刯ⴝ ++N; ßⴠ.\u0620刯ⴝ; [B2 B3]; [B2 B3] # ßⴠ.ؠ刯ⴝ ++B; SSჀ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ssჀ.ؠ刯Ⴝ ++B; ssⴠ.\u0620刯ⴝ; [B2 B3]; [B2 B3] # ssⴠ.ؠ刯ⴝ ++B; Ssⴠ.\u0620刯Ⴝ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ssⴠ.ؠ刯Ⴝ ++B; xn--ss-j81a.xn--fgb845cb66c; [B2 B3 V6]; [B2 B3 V6] # ssⴠ.ؠ刯Ⴝ ++B; xn--ss-j81a.xn--fgb670rovy; [B2 B3]; [B2 B3] # ssⴠ.ؠ刯ⴝ ++B; xn--ss-wgk.xn--fgb845cb66c; [B2 B3 V6]; [B2 B3 V6] # ssჀ.ؠ刯Ⴝ ++B; xn--zca277t.xn--fgb670rovy; [B2 B3]; [B2 B3] # ßⴠ.ؠ刯ⴝ ++B; xn--zca442f.xn--fgb845cb66c; [B2 B3 V6]; [B2 B3 V6] # ßჀ.ؠ刯Ⴝ ++B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ ++B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ ++B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ ++B; \u1BAAႣℲ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪ႣℲ.ᠳ툻ٳ ++B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ ++B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ ++B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ ++B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ ++B; xn--bnd957c2pe.xn--sib102gc69k; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ ++B; xn--yxf24x4ol.xn--sib102gc69k; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ ++B; xn--bnd957cone.xn--sib102gc69k; [B5 B6 V5 V6]; [B5 B6 V5 V6] # 
᮪ႣℲ.ᠳ툻ٳ ++B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ ++B; \u1BAAⴃⅎ。ᠳ툻\u0673; [B5 B6 V5]; [B5 B6 V5] # ᮪ⴃⅎ.ᠳ툻ٳ ++B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ ++B; \u1BAAႣⅎ。ᠳ툻\u0673; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] # ᮪Ⴃⅎ.ᠳ툻ٳ ++B; \u06EC.\u08A2𐹫\u067C; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ۬.ࢢ𐹫ټ ++B; xn--8lb.xn--1ib31ily45b; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ۬.ࢢ𐹫ټ ++B; \u06B6\u06DF。₇\uA806; [B1]; [B1] # ڶ۟.7꠆ ++B; \u06B6\u06DF。7\uA806; [B1]; [B1] # ڶ۟.7꠆ ++B; xn--pkb6f.xn--7-x93e; [B1]; [B1] # ڶ۟.7꠆ ++B; \u06B6\u06DF.7\uA806; [B1]; [B1] # ڶ۟.7꠆ ++T; Ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # Ⴣ𐹻.𝪣≮ ++N; Ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴣ𐹻.𝪣≮ ++T; Ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # Ⴣ𐹻.𝪣≮ ++N; Ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴣ𐹻.𝪣≮ ++T; ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # ⴣ𐹻.𝪣≮ ++N; ⴣ𐹻.\u200C𝪣<\u0338󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ⴣ𐹻.𝪣≮ ++T; ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V5 V6] # ⴣ𐹻.𝪣≮ ++N; ⴣ𐹻.\u200C𝪣≮󠩉; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # ⴣ𐹻.𝪣≮ ++B; xn--rlj6323e.xn--gdh4944ob3x3e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] ++B; xn--rlj6323e.xn--0ugy6gn120eb103g; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # ⴣ𐹻.𝪣≮ ++B; xn--7nd8101k.xn--gdh4944ob3x3e; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] ++B; xn--7nd8101k.xn--0ugy6gn120eb103g; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # Ⴣ𐹻.𝪣≮ ++T; 𝟵隁⯮.\u180D\u200C; [C1]; xn--9-mfs8024b. # 9隁⯮. ++N; 𝟵隁⯮.\u180D\u200C; [C1]; [C1] # 9隁⯮. ++T; 9隁⯮.\u180D\u200C; [C1]; xn--9-mfs8024b. # 9隁⯮. ++N; 9隁⯮.\u180D\u200C; [C1]; [C1] # 9隁⯮. ++B; xn--9-mfs8024b.; 9隁⯮.; xn--9-mfs8024b.; NV8 ++B; 9隁⯮.; ; xn--9-mfs8024b.; NV8 ++B; xn--9-mfs8024b.xn--0ug; [C1]; [C1] # 9隁⯮. 
++B; ⒏𐹧。Ⴣ\u0F84彦; [B1 P1 V6]; [B1 P1 V6] # ⒏𐹧.Ⴣ྄彦 ++B; 8.𐹧。Ⴣ\u0F84彦; [B1 P1 V6]; [B1 P1 V6] # 8.𐹧.Ⴣ྄彦 ++B; 8.𐹧。ⴣ\u0F84彦; [B1]; [B1] # 8.𐹧.ⴣ྄彦 ++B; 8.xn--fo0d.xn--3ed972m6o8a; [B1]; [B1] # 8.𐹧.ⴣ྄彦 ++B; 8.xn--fo0d.xn--3ed15dt93o; [B1 V6]; [B1 V6] # 8.𐹧.Ⴣ྄彦 ++B; ⒏𐹧。ⴣ\u0F84彦; [B1 P1 V6]; [B1 P1 V6] # ⒏𐹧.ⴣ྄彦 ++B; xn--0sh2466f.xn--3ed972m6o8a; [B1 V6]; [B1 V6] # ⒏𐹧.ⴣ྄彦 ++B; xn--0sh2466f.xn--3ed15dt93o; [B1 V6]; [B1 V6] # ⒏𐹧.Ⴣ྄彦 ++B; -问񬰔⒛。\u0604-񜗉橬; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -问⒛.-橬 ++B; -问񬰔20.。\u0604-񜗉橬; [B1 P1 V3 V6 A4_2]; [B1 P1 V3 V6 A4_2] # -问20..-橬 ++B; xn---20-658jx1776d..xn----ykc7228efm46d; [B1 V3 V6 A4_2]; [B1 V3 V6 A4_2] # -问20..-橬 ++B; xn----hdpu849bhis3e.xn----ykc7228efm46d; [B1 V3 V6]; [B1 V3 V6] # -问⒛.-橬 ++T; \u1BACႬ\u200C\u0325。𝟸; [C1 P1 V5 V6]; [P1 V5 V6] # ᮬႬ̥.2 ++N; \u1BACႬ\u200C\u0325。𝟸; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ᮬႬ̥.2 ++T; \u1BACႬ\u200C\u0325。2; [C1 P1 V5 V6]; [P1 V5 V6] # ᮬႬ̥.2 ++N; \u1BACႬ\u200C\u0325。2; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ᮬႬ̥.2 ++T; \u1BACⴌ\u200C\u0325。2; [C1 V5]; [V5] # ᮬⴌ̥.2 ++N; \u1BACⴌ\u200C\u0325。2; [C1 V5]; [C1 V5] # ᮬⴌ̥.2 ++B; xn--mta176jjjm.2; [V5]; [V5] # ᮬⴌ̥.2 ++B; xn--mta176j97cl2q.2; [C1 V5]; [C1 V5] # ᮬⴌ̥.2 ++B; xn--mta930emri.2; [V5 V6]; [V5 V6] # ᮬႬ̥.2 ++B; xn--mta930emribme.2; [C1 V5 V6]; [C1 V5 V6] # ᮬႬ̥.2 ++T; \u1BACⴌ\u200C\u0325。𝟸; [C1 V5]; [V5] # ᮬⴌ̥.2 ++N; \u1BACⴌ\u200C\u0325。𝟸; [C1 V5]; [C1 V5] # ᮬⴌ̥.2 ++B; \uDC5F。\uA806\u0669󠒩; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ ++B; \uDC5F.xn--iib9583fusy0i; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ ++B; \uDC5F.XN--IIB9583FUSY0I; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ ++B; \uDC5F.Xn--Iib9583fusy0i; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # .꠆٩ ++B; 󠄁\u035F⾶。₇︒눇≮; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7︒눇≮ ++B; 󠄁\u035F⾶。₇︒눇<\u0338; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7︒눇≮ ++B; 󠄁\u035F飛。7。눇≮; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7.눇≮ ++B; 󠄁\u035F飛。7。눇<\u0338; [P1 V5 V6]; [P1 V5 V6] # ͟飛.7.눇≮ ++B; xn--9ua0567e.7.xn--gdh6767c; [V5 V6]; [V5 V6] # ͟飛.7.눇≮ ++B; xn--9ua0567e.xn--7-ngou006d1ttc; [V5 V6]; [V5 V6] # 
͟飛.7︒눇≮ ++T; \u200C\uFE09𐹴\u200D.\u200C⿃; [B1 C1 C2]; [B1] # 𐹴.鳥 ++N; \u200C\uFE09𐹴\u200D.\u200C⿃; [B1 C1 C2]; [B1 C1 C2] # 𐹴.鳥 ++T; \u200C\uFE09𐹴\u200D.\u200C鳥; [B1 C1 C2]; [B1] # 𐹴.鳥 ++N; \u200C\uFE09𐹴\u200D.\u200C鳥; [B1 C1 C2]; [B1 C1 C2] # 𐹴.鳥 ++B; xn--so0d.xn--6x6a; [B1]; [B1] ++B; xn--0ugc6024p.xn--0ug1920c; [B1 C1 C2]; [B1 C1 C2] # 𐹴.鳥 ++T; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 P1 V6] # 🍮.𐦁𝨝 ++N; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 🍮.𐦁𝨝 ++T; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 P1 V6] # 🍮.𐦁𝨝 ++N; 🍮.\u200D󠗒𐦁𝨝; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 🍮.𐦁𝨝 ++B; xn--lj8h.xn--ln9ci476aqmr2g; [B1 V6]; [B1 V6] ++B; xn--lj8h.xn--1ug6603gr1pfwq37h; [B1 C2 V6]; [B1 C2 V6] # 🍮.𐦁𝨝 ++T; \u067D\u0943.𞤓\u200D; [B3 C2]; xn--2ib43l.xn--te6h # ٽृ.𞤵 ++N; \u067D\u0943.𞤓\u200D; [B3 C2]; [B3 C2] # ٽृ.𞤵 ++T; \u067D\u0943.𞤵\u200D; [B3 C2]; xn--2ib43l.xn--te6h # ٽृ.𞤵 ++N; \u067D\u0943.𞤵\u200D; [B3 C2]; [B3 C2] # ٽृ.𞤵 ++B; xn--2ib43l.xn--te6h; \u067D\u0943.𞤵; xn--2ib43l.xn--te6h # ٽृ.𞤵 ++B; \u067D\u0943.𞤵; ; xn--2ib43l.xn--te6h # ٽृ.𞤵 ++B; \u067D\u0943.𞤓; \u067D\u0943.𞤵; xn--2ib43l.xn--te6h # ٽृ.𞤵 ++B; xn--2ib43l.xn--1ugy711p; [B3 C2]; [B3 C2] # ٽृ.𞤵 ++B; \u0664\u0A4D-.󥜽\u1039񦦐; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٤੍-.္ ++B; \u0664\u0A4D-.󥜽\u1039񦦐; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٤੍-.္ ++B; xn----gqc711a.xn--9jd88234f3qm0b; [B1 V3 V6]; [B1 V3 V6] # ٤੍-.္ ++T; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ ++N; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ ++T; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ ++N; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ ++T; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ ++N; 4\u103A-𐹸。\uAA29\u200C𐹴≮; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ ++T; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ ++N; 4\u103A-𐹸。\uAA29\u200C𐹴<\u0338; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ ++B; 
xn--4--e4j7831r.xn--gdh8754cz40c; [B1 V5 V6]; [B1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ ++B; xn--4--e4j7831r.xn--0ugy6gjy5sl3ud; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 4်-𐹸.ꨩ𐹴≮ ++T; \u200C。\uFFA0\u0F84\u0F96; [C1 P1 V6]; [P1 V6 A4_2] # .྄ྖ ++N; \u200C。\uFFA0\u0F84\u0F96; [C1 P1 V6]; [C1 P1 V6] # .྄ྖ ++T; \u200C。\u1160\u0F84\u0F96; [C1 P1 V6]; [P1 V6 A4_2] # .྄ྖ ++N; \u200C。\u1160\u0F84\u0F96; [C1 P1 V6]; [C1 P1 V6] # .྄ྖ ++B; .xn--3ed0b20h; [V6 A4_2]; [V6 A4_2] # .྄ྖ ++B; xn--0ug.xn--3ed0b20h; [C1 V6]; [C1 V6] # .྄ྖ ++B; .xn--3ed0by082k; [V6 A4_2]; [V6 A4_2] # .྄ྖ ++B; xn--0ug.xn--3ed0by082k; [C1 V6]; [C1 V6] # .྄ྖ ++T; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 ++N; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 ++T; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 ++N; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 ++T; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 ++N; ≯򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 ++T; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [P1 V6] # ≯.𐅼 ++N; >\u0338򍘅.\u200D𐅼򲇛; [C2 P1 V6]; [C2 P1 V6] # ≯.𐅼 ++B; xn--hdh84488f.xn--xy7cw2886b; [V6]; [V6] ++B; xn--hdh84488f.xn--1ug8099fbjp4e; [C2 V6]; [C2 V6] # ≯.𐅼 ++T; \u0641ß𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 ++N; \u0641ß𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 ++T; \u0641ß𐰯。7𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 ++N; \u0641ß𐰯。7𐫫; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 ++B; \u0641SS𐰯。7𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 ++B; \u0641ss𐰯。7𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 ++B; \u0641Ss𐰯。7𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 ++B; xn--ss-jvd2339x.xn--7-mm5i; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 ++B; xn--zca96ys96y.xn--7-mm5i; [B1 B2]; [B1 B2] # فß𐰯.7𐫫 ++B; \u0641SS𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 ++B; \u0641ss𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 ++B; \u0641Ss𐰯。𝟕𐫫; [B1 B2]; [B1 B2] # فss𐰯.7𐫫 ++T; ß\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ßެާࢱ.𐭁𐹲 ++N; ß\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ßެާࢱ.𐭁𐹲 ++B; SS\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ssެާࢱ.𐭁𐹲 ++B; ss\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ssެާࢱ.𐭁𐹲 ++B; Ss\u07AC\u07A7\u08B1。𐭁􅮙𐹲; [B2 
B5 B6 P1 V6]; [B2 B5 B6 P1 V6] # ssެާࢱ.𐭁𐹲 ++B; xn--ss-9qet02k.xn--e09co8cr9861c; [B2 B5 B6 V6]; [B2 B5 B6 V6] # ssެާࢱ.𐭁𐹲 ++B; xn--zca685aoa95h.xn--e09co8cr9861c; [B2 B5 B6 V6]; [B2 B5 B6 V6] # ßެާࢱ.𐭁𐹲 ++B; -。󠉗⒌𞯛; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; -。󠉗5.𞯛; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; -.xn--5-zz21m.xn--6x6h; [B1 V3 V6]; [B1 V3 V6] ++B; -.xn--xsh6367n1bi3e; [B1 V3 V6]; [B1 V3 V6] ++T; 𼎏ς.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج ++N; 𼎏ς.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج ++T; 𼎏ς.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج ++N; 𼎏ς.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج ++T; 𼎏ς.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج ++N; 𼎏ς.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج ++T; 𼎏ς.-<\u0338\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج ++N; 𼎏ς.-<\u0338\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ς.-≮خج ++B; 𼎏Σ.-<\u0338\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج ++B; 𼎏Σ.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج ++B; 𼎏σ.-≮\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج ++B; 𼎏σ.-<\u0338\u062E\u062C; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج ++B; xn--4xa92520c.xn----9mcf1400a; [B1 V3 V6]; [B1 V3 V6] # σ.-≮خج ++B; xn--3xa13520c.xn----9mcf1400a; [B1 V3 V6]; [B1 V3 V6] # ς.-≮خج ++B; 𼎏Σ.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج ++B; 𼎏Σ.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج ++B; 𼎏σ.-≮\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج ++B; 𼎏σ.-<\u0338\uFCAB; [B1 P1 V3 V6]; [B1 P1 V3 V6] # σ.-≮خج ++B; ꡗ\u08B8\u0719.񔤔󠛙\u0C4D\uFC3E; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ꡗࢸܙ.్كي ++B; ꡗ\u08B8\u0719.񔤔󠛙\u0C4D\u0643\u064A; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ꡗࢸܙ.్كي ++B; xn--jnb34fs003a.xn--fhbo927bk128mpi24d; [B5 B6 V6]; [B5 B6 V6] # ꡗࢸܙ.్كي ++B; 𐠰\u08B7𞤌𐫭。𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 ++B; 𐠰\u08B7𞤮𐫭。𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 ++B; xn--dzb5191kezbrw47a.xn--p4e3841jz9tf; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 ++B; 𐠰\u08B7𞤮𐫭.𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 ++B; 𐠰\u08B7𞤌𐫭.𐋦\u17CD𝩃; [B1]; [B1] # 𐠰ࢷ𞤮𐫭.𐋦៍𝩃 
++T; ₂㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 ++N; ₂㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 ++T; ₂㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 ++N; ₂㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 ++T; 2㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 ++N; 2㘷--。\u06D3\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 ++T; 2㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 ++N; 2㘷--。\u06D2\u0654\u200C𐫆𑖿; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 ++B; xn--2---u58b.xn--jlb8024k14g; [B1 V2 V3]; [B1 V2 V3] # 2㘷--.ۓ𐫆𑖿 ++B; xn--2---u58b.xn--jlb820ku99nbgj; [B1 C1 V2 V3]; [B1 C1 V2 V3] # 2㘷--.ۓ𐫆𑖿 ++B; -𘊻.ᡮ\u062D-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -𘊻.ᡮح- ++B; -𘊻.ᡮ\u062D-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -𘊻.ᡮح- ++B; xn----bp5n.xn----bnc231l; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -𘊻.ᡮح- ++T; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ß.ᢣ𐹭ؿ ++N; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ß.ᢣ𐹭ؿ ++T; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ß.ᢣ𐹭ؿ ++N; \u200C-ß。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ß.ᢣ𐹭ؿ ++T; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ ++N; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ ++T; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ ++N; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ ++T; \u200C-Ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ ++N; \u200C-Ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ ++B; -ss.xn--bhb925glx3p; [B1 B5 B6 V3]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ ++B; xn---ss-8m0a.xn--bhb925glx3p; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ ++B; xn----qfa550v.xn--bhb925glx3p; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ß.ᢣ𐹭ؿ ++T; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ ++N; \u200C-SS。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ ++T; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ ++N; \u200C-ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ ++T; \u200C-Ss。ᢣ𐹭\u063F; [B1 
B5 B6 C1]; [B1 B5 B6 V3] # -ss.ᢣ𐹭ؿ ++N; \u200C-Ss。ᢣ𐹭\u063F; [B1 B5 B6 C1]; [B1 B5 B6 C1] # -ss.ᢣ𐹭ؿ ++B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 ++B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 ++B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 ++B; ꧐Ӏ\u1BAA\u08F6.눵; [P1 V6]; [P1 V6] # ꧐Ӏ᮪ࣶ.눵 ++B; ꧐ӏ\u1BAA\u08F6.눵; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 ++B; ꧐ӏ\u1BAA\u08F6.눵; ; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 ++B; xn--s5a04sn4u297k.xn--2e1b; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 ++B; xn--d5a07sn4u297k.xn--2e1b; [V6]; [V6] # ꧐Ӏ᮪ࣶ.눵 ++B; ꧐ӏ\u1BAA\u08F6.눵; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 ++B; ꧐ӏ\u1BAA\u08F6.눵; ꧐ӏ\u1BAA\u08F6.눵; xn--s5a04sn4u297k.xn--2e1b # ꧐ӏ᮪ࣶ.눵 ++B; \uA8EA。𖄿𑆾󠇗; [P1 V5 V6]; [P1 V5 V6] # ꣪.𑆾 ++B; \uA8EA。𖄿𑆾󠇗; [P1 V5 V6]; [P1 V5 V6] # ꣪.𑆾 ++B; xn--3g9a.xn--ud1dz07k; [V5 V6]; [V5 V6] # ꣪.𑆾 ++B; 󇓓𑚳。񐷿≯⾇; [P1 V6]; [P1 V6] ++B; 󇓓𑚳。񐷿>\u0338⾇; [P1 V6]; [P1 V6] ++B; 󇓓𑚳。񐷿≯舛; [P1 V6]; [P1 V6] ++B; 󇓓𑚳。񐷿>\u0338舛; [P1 V6]; [P1 V6] ++B; xn--3e2d79770c.xn--hdh0088abyy1c; [V6]; [V6] ++T; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; xn--9hb7344k. # 𐫇١. ++N; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; [B1 B3 C1 C2] # 𐫇١. ++T; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; xn--9hb7344k. # 𐫇١. ++N; 𐫇\u0661\u200C.\u200D\u200C; [B1 B3 C1 C2]; [B1 B3 C1 C2] # 𐫇١. ++B; xn--9hb7344k.; 𐫇\u0661.; xn--9hb7344k. # 𐫇١. ++B; 𐫇\u0661.; ; xn--9hb7344k. # 𐫇١. ++B; xn--9hb652kv99n.xn--0ugb; [B1 B3 C1 C2]; [B1 B3 C1 C2] # 𐫇١. 
++T; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 ++N; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 ++T; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 ++N; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 ++T; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 ++N; 񡅈砪≯ᢑ。≯𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 ++T; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [P1 V6] # 砪≯ᢑ.≯𝩚 ++N; 񡅈砪>\u0338ᢑ。>\u0338𝩚򓴔\u200C; [C1 P1 V6]; [C1 P1 V6] # 砪≯ᢑ.≯𝩚 ++B; xn--bbf561cf95e57y3e.xn--hdh0834o7mj6b; [V6]; [V6] ++B; xn--bbf561cf95e57y3e.xn--0ugz6gc910ejro8c; [C1 V6]; [C1 V6] # 砪≯ᢑ.≯𝩚 ++B; Ⴥ.𑄳㊸; [P1 V5 V6]; [P1 V5 V6] ++B; Ⴥ.𑄳43; [P1 V5 V6]; [P1 V5 V6] ++B; ⴥ.𑄳43; [V5]; [V5] ++B; xn--tlj.xn--43-274o; [V5]; [V5] ++B; xn--9nd.xn--43-274o; [V5 V6]; [V5 V6] ++B; ⴥ.𑄳㊸; [V5]; [V5] ++B; 𝟎\u0663。Ⴒᡇ\u08F2𐹠; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 0٣.Ⴒᡇࣲ𐹠 ++B; 0\u0663。Ⴒᡇ\u08F2𐹠; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 0٣.Ⴒᡇࣲ𐹠 ++B; 0\u0663。ⴒᡇ\u08F2𐹠; [B1 B5 B6]; [B1 B5 B6] # 0٣.ⴒᡇࣲ𐹠 ++B; xn--0-fqc.xn--10b369eivp359r; [B1 B5 B6]; [B1 B5 B6] # 0٣.ⴒᡇࣲ𐹠 ++B; xn--0-fqc.xn--10b180bnwgfy0z; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 0٣.Ⴒᡇࣲ𐹠 ++B; 𝟎\u0663。ⴒᡇ\u08F2𐹠; [B1 B5 B6]; [B1 B5 B6] # 0٣.ⴒᡇࣲ𐹠 ++B; 񗪨󠄉\uFFA0\u0FB7.񸞰\uA953; [P1 V6]; [P1 V6] # ྷ.꥓ ++B; 񗪨󠄉\u1160\u0FB7.񸞰\uA953; [P1 V6]; [P1 V6] # ྷ.꥓ ++B; xn--kgd36f9z57y.xn--3j9au7544a; [V6]; [V6] # ྷ.꥓ ++B; xn--kgd7493jee34a.xn--3j9au7544a; [V6]; [V6] # ྷ.꥓ ++T; \u0618.۳\u200C\uA953; [C1 V5]; [V5] # ؘ.۳꥓ ++N; \u0618.۳\u200C\uA953; [C1 V5]; [C1 V5] # ؘ.۳꥓ ++B; xn--6fb.xn--gmb0524f; [V5]; [V5] # ؘ.۳꥓ ++B; xn--6fb.xn--gmb469jjf1h; [C1 V5]; [C1 V5] # ؘ.۳꥓ ++B; ᡌ.︒ᢑ; [P1 V6]; [P1 V6] ++B; ᡌ.。ᢑ; [A4_2]; [A4_2] ++B; xn--c8e..xn--bbf; [A4_2]; [A4_2] ++B; xn--c8e.xn--bbf9168i; [V6]; [V6] ++B; 𑋪\u1073。𞽧; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑋪ၳ. ++B; 𑋪\u1073。𞽧; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑋪ၳ. ++B; xn--xld7443k.xn--4o7h; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𑋪ၳ. 
++B; 𞷏。ᠢ򓘆; [P1 V6]; [P1 V6] ++B; xn--hd7h.xn--46e66060j; [V6]; [V6] ++T; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑄳㴼.𐹡⃫ ++N; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑄳㴼.𐹡⃫ ++T; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑄳㴼.𐹡⃫ ++N; 𑄳㴼.\u200C𐹡\u20EB񫺦; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑄳㴼.𐹡⃫ ++B; xn--iym9428c.xn--e1g3464g08p3b; [B1 V5 V6]; [B1 V5 V6] # 𑄳㴼.𐹡⃫ ++B; xn--iym9428c.xn--0ug46a7218cllv0c; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𑄳㴼.𐹡⃫ ++B; 񠻟𐹳𑈯。\u031D; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𐹳𑈯.̝ ++B; 񠻟𐹳𑈯。\u031D; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𐹳𑈯.̝ ++B; xn--ro0dw7dey96m.xn--eta; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # 𐹳𑈯.̝ ++B; ᢊ뾜󠱴𑚶。\u089D𐹥; [P1 V6]; [P1 V6] # ᢊ뾜𑚶.𐹥 ++B; ᢊ뾜󠱴𑚶。\u089D𐹥; [P1 V6]; [P1 V6] # ᢊ뾜𑚶.𐹥 ++B; xn--39e4566fjv8bwmt6n.xn--myb6415k; [V6]; [V6] # ᢊ뾜𑚶.𐹥 ++T; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 ++N; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 ++T; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 ++N; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 ++T; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 ++N; 𐹥≠。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 ++T; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹥≠.𐋲 ++N; 𐹥=\u0338。𐋲󠧠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹥≠.𐋲 ++B; xn--1ch6704g.xn--m97cw2999c; [B1 V6]; [B1 V6] ++B; xn--1ch6704g.xn--0ug3840g51u4g; [B1 C1 V6]; [B1 C1 V6] # 𐹥≠.𐋲 ++T; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B5 B6 P1 V5 V6] # ्.꥓ ++N; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ्.꥓ ++T; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B5 B6 P1 V5 V6] # ्.꥓ ++N; \u115F񙯠\u094D.\u200D\uA953𐪤; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ्.꥓ ++B; xn--n3b542bb085j.xn--3j9al95p; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ्.꥓ ++B; xn--n3b542bb085j.xn--1ug6815co9wc; [B1 C2 V6]; [B1 C2 V6] # ्.꥓ ++B; 򌋔󠆎󠆗𑲕。≮; [P1 V6]; [P1 V6] ++B; 򌋔󠆎󠆗𑲕。<\u0338; [P1 V6]; [P1 V6] ++B; xn--4m3dv4354a.xn--gdh; [V6]; [V6] ++B; 󠆦.\u08E3暀≠; [P1 V5 V6 A4_2]; [P1 V5 V6 A4_2] # .ࣣ暀≠ ++B; 
󠆦.\u08E3暀=\u0338; [P1 V5 V6 A4_2]; [P1 V5 V6 A4_2] # .ࣣ暀≠ ++B; .xn--m0b461k3g2c; [V5 V6 A4_2]; [V5 V6 A4_2] # .ࣣ暀≠ ++B; 𐡤\uABED。\uFD30򜖅\u1DF0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐡤꯭.شمᷰ ++B; 𐡤\uABED。\u0634\u0645򜖅\u1DF0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐡤꯭.شمᷰ ++B; xn--429ak76o.xn--zgb8a701kox37t; [B2 B3 V6]; [B2 B3 V6] # 𐡤꯭.شمᷰ ++T; 𝉃\u200D⒈。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝉃⒈.Ⴌ ++N; 𝉃\u200D⒈。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # 𝉃⒈.Ⴌ ++T; 𝉃\u200D1.。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 P1 V5 V6 A4_2] # 𝉃1..Ⴌ ++N; 𝉃\u200D1.。Ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 C2 P1 V5 V6 A4_2] # 𝉃1..Ⴌ ++T; 𝉃\u200D1.。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 P1 V5 V6 A4_2] # 𝉃1..ⴌ ++N; 𝉃\u200D1.。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6 A4_2]; [B1 B5 B6 C2 P1 V5 V6 A4_2] # 𝉃1..ⴌ ++B; xn--1-px8q..xn--3kj4524l; [B1 B5 B6 V5 V6 A4_2]; [B1 B5 B6 V5 V6 A4_2] ++B; xn--1-tgn9827q..xn--3kj4524l; [B1 B5 B6 C2 V5 V6 A4_2]; [B1 B5 B6 C2 V5 V6 A4_2] # 𝉃1..ⴌ ++B; xn--1-px8q..xn--knd8464v; [B1 B5 B6 V5 V6 A4_2]; [B1 B5 B6 V5 V6 A4_2] ++B; xn--1-tgn9827q..xn--knd8464v; [B1 B5 B6 C2 V5 V6 A4_2]; [B1 B5 B6 C2 V5 V6 A4_2] # 𝉃1..Ⴌ ++T; 𝉃\u200D⒈。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝉃⒈.ⴌ ++N; 𝉃\u200D⒈。ⴌ𞱓; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # 𝉃⒈.ⴌ ++B; xn--tshz828m.xn--3kj4524l; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] ++B; xn--1ug68oq348b.xn--3kj4524l; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # 𝉃⒈.ⴌ ++B; xn--tshz828m.xn--knd8464v; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] ++B; xn--1ug68oq348b.xn--knd8464v; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # 𝉃⒈.Ⴌ ++T; 󠣙\u0A4D𱫘𞤸.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.ς ++N; 󠣙\u0A4D𱫘𞤸.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.ς ++B; 󠣙\u0A4D𱫘𞤖.Σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ ++B; 󠣙\u0A4D𱫘𞤸.σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ ++B; 󠣙\u0A4D𱫘𞤖.σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ ++B; xn--ybc0236vjvxgt5q0g.xn--4xa82737giye6b; [B1 V6]; [B1 V6] # ੍𞤸.σ ++T; 󠣙\u0A4D𱫘𞤖.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.ς ++N; 󠣙\u0A4D𱫘𞤖.ς񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.ς 
++B; xn--ybc0236vjvxgt5q0g.xn--3xa03737giye6b; [B1 V6]; [B1 V6] # ੍𞤸.ς ++B; 󠣙\u0A4D𱫘𞤸.Σ񵯞􈰔; [B1 P1 V6]; [B1 P1 V6] # ੍𞤸.σ ++T; \u07D3。\u200C𐫀򞭱; [B1 C1 P1 V6]; [B2 B3 P1 V6] # ߓ.𐫀 ++N; \u07D3。\u200C𐫀򞭱; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ߓ.𐫀 ++B; xn--usb.xn--pw9ci1099a; [B2 B3 V6]; [B2 B3 V6] # ߓ.𐫀 ++B; xn--usb.xn--0ug9553gm3v5d; [B1 C1 V6]; [B1 C1 V6] # ߓ.𐫀 ++B; \u1C2E𞀝.\u05A6ꡟ𞤕󠆖; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᰮ𞀝.֦ꡟ𞤷 ++B; \u1C2E𞀝.\u05A6ꡟ𞤷󠆖; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᰮ𞀝.֦ꡟ𞤷 ++B; xn--q1f4493q.xn--xcb8244fifvj; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᰮ𞀝.֦ꡟ𞤷 ++T; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [P1 V6] # 䂹𐋦. ++N; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [C2 P1 V6] # 䂹𐋦. ++T; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [P1 V6] # 䂹𐋦. ++N; 䂹󾖅𐋦.\u200D; [C2 P1 V6]; [C2 P1 V6] # 䂹𐋦. ++B; xn--0on3543c5981i.; [V6]; [V6] ++B; xn--0on3543c5981i.xn--1ug; [C2 V6]; [C2 V6] # 䂹𐋦. ++T; \uA9C0\u200C𐹲\u200C。\u0767🄉; [B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # ꧀𐹲.ݧ🄉 ++N; \uA9C0\u200C𐹲\u200C。\u0767🄉; [B5 B6 C1 P1 V5 V6]; [B5 B6 C1 P1 V5 V6] # ꧀𐹲.ݧ🄉 ++T; \uA9C0\u200C𐹲\u200C。\u07678,; [B3 B5 B6 C1 P1 V5 V6]; [B3 B5 B6 P1 V5 V6] # ꧀𐹲.ݧ8, ++N; \uA9C0\u200C𐹲\u200C。\u07678,; [B3 B5 B6 C1 P1 V5 V6]; [B3 B5 B6 C1 P1 V5 V6] # ꧀𐹲.ݧ8, ++B; xn--7m9an32q.xn--8,-qle; [B3 B5 B6 P1 V5 V6]; [B3 B5 B6 P1 V5 V6] # ꧀𐹲.ݧ8, ++B; xn--0uga8686hdgvd.xn--8,-qle; [B3 B5 B6 C1 P1 V5 V6]; [B3 B5 B6 C1 P1 V5 V6] # ꧀𐹲.ݧ8, ++B; xn--7m9an32q.xn--rpb6081w; [B5 B6 V5 V6]; [B5 B6 V5 V6] # ꧀𐹲.ݧ🄉 ++B; xn--0uga8686hdgvd.xn--rpb6081w; [B5 B6 C1 V5 V6]; [B5 B6 C1 V5 V6] # ꧀𐹲.ݧ🄉 ++B; ︒。Ⴃ≯; [P1 V6]; [P1 V6] ++B; ︒。Ⴃ>\u0338; [P1 V6]; [P1 V6] ++B; 。。Ⴃ≯; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; 。。Ⴃ>\u0338; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; 。。ⴃ>\u0338; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; 。。ⴃ≯; [P1 V6 A4_2]; [P1 V6 A4_2] ++B; ..xn--hdh782b; [V6 A4_2]; [V6 A4_2] ++B; ..xn--bnd622g; [V6 A4_2]; [V6 A4_2] ++B; ︒。ⴃ>\u0338; [P1 V6]; [P1 V6] ++B; ︒。ⴃ≯; [P1 V6]; [P1 V6] ++B; xn--y86c.xn--hdh782b; [V6]; [V6] ++B; xn--y86c.xn--bnd622g; [V6]; [V6] ++T; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹮. 
++N; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹮. ++T; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹮. ++N; 𐹮。󠢼\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹮. ++B; xn--mo0d.xn--wy46e; [B1 V6]; [B1 V6] ++B; xn--mo0d.xn--1ug18431l; [B1 C2 V6]; [B1 C2 V6] # 𐹮. ++T; Ⴞ𐹨。︒\u077D\u200DႯ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 P1 V6] # Ⴞ𐹨.︒ݽႯ ++N; Ⴞ𐹨。︒\u077D\u200DႯ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # Ⴞ𐹨.︒ݽႯ ++T; Ⴞ𐹨。。\u077D\u200DႯ; [B2 B3 B5 B6 C2 P1 V6 A4_2]; [B2 B3 B5 B6 P1 V6 A4_2] # Ⴞ𐹨..ݽႯ ++N; Ⴞ𐹨。。\u077D\u200DႯ; [B2 B3 B5 B6 C2 P1 V6 A4_2]; [B2 B3 B5 B6 C2 P1 V6 A4_2] # Ⴞ𐹨..ݽႯ ++T; ⴞ𐹨。。\u077D\u200Dⴏ; [B2 B3 B5 B6 C2 A4_2]; [B2 B3 B5 B6 A4_2] # ⴞ𐹨..ݽⴏ ++N; ⴞ𐹨。。\u077D\u200Dⴏ; [B2 B3 B5 B6 C2 A4_2]; [B2 B3 B5 B6 C2 A4_2] # ⴞ𐹨..ݽⴏ ++B; xn--mlju223e..xn--eqb053q; [B2 B3 B5 B6 A4_2]; [B2 B3 B5 B6 A4_2] # ⴞ𐹨..ݽⴏ ++B; xn--mlju223e..xn--eqb096jpgj; [B2 B3 B5 B6 C2 A4_2]; [B2 B3 B5 B6 C2 A4_2] # ⴞ𐹨..ݽⴏ ++B; xn--2nd0990k..xn--eqb228b; [B2 B3 B5 B6 V6 A4_2]; [B2 B3 B5 B6 V6 A4_2] # Ⴞ𐹨..ݽႯ ++B; xn--2nd0990k..xn--eqb228bgzm; [B2 B3 B5 B6 C2 V6 A4_2]; [B2 B3 B5 B6 C2 V6 A4_2] # Ⴞ𐹨..ݽႯ ++T; ⴞ𐹨。︒\u077D\u200Dⴏ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 P1 V6] # ⴞ𐹨.︒ݽⴏ ++N; ⴞ𐹨。︒\u077D\u200Dⴏ; [B1 B5 B6 C2 P1 V6]; [B1 B5 B6 C2 P1 V6] # ⴞ𐹨.︒ݽⴏ ++B; xn--mlju223e.xn--eqb053qjk7l; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ⴞ𐹨.︒ݽⴏ ++B; xn--mlju223e.xn--eqb096jpgj9y7r; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # ⴞ𐹨.︒ݽⴏ ++B; xn--2nd0990k.xn--eqb228b583r; [B1 B5 B6 V6]; [B1 B5 B6 V6] # Ⴞ𐹨.︒ݽႯ ++B; xn--2nd0990k.xn--eqb228bgzmvp0t; [B1 B5 B6 C2 V6]; [B1 B5 B6 C2 V6] # Ⴞ𐹨.︒ݽႯ ++T; \u200CႦ𝟹。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # Ⴆ3.-⃒-ߑ ++N; \u200CႦ𝟹。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴆ3.-⃒-ߑ ++T; \u200CႦ3。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # Ⴆ3.-⃒-ߑ ++N; \u200CႦ3。-\u20D2-\u07D1; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # Ⴆ3.-⃒-ߑ ++T; \u200Cⴆ3。-\u20D2-\u07D1; [B1 C1 V3]; [B1 V3] # ⴆ3.-⃒-ߑ ++N; \u200Cⴆ3。-\u20D2-\u07D1; [B1 C1 V3]; [B1 C1 V3] # ⴆ3.-⃒-ߑ ++B; xn--3-lvs.xn-----vue617w; [B1 V3]; [B1 V3] # 
ⴆ3.-⃒-ߑ ++B; xn--3-rgnv99c.xn-----vue617w; [B1 C1 V3]; [B1 C1 V3] # ⴆ3.-⃒-ߑ ++B; xn--3-i0g.xn-----vue617w; [B1 V3 V6]; [B1 V3 V6] # Ⴆ3.-⃒-ߑ ++B; xn--3-i0g939i.xn-----vue617w; [B1 C1 V3 V6]; [B1 C1 V3 V6] # Ⴆ3.-⃒-ߑ ++T; \u200Cⴆ𝟹。-\u20D2-\u07D1; [B1 C1 V3]; [B1 V3] # ⴆ3.-⃒-ߑ ++N; \u200Cⴆ𝟹。-\u20D2-\u07D1; [B1 C1 V3]; [B1 C1 V3] # ⴆ3.-⃒-ߑ ++B; 箃Ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] ++B; 箃Ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] ++B; 箃Ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] ++B; 箃Ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] ++B; 箃ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] ++B; 箃ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] ++B; xn----4wsr321ay823p.xn----tfot873s; [V6]; [V6] ++B; xn----11g3013fy8x5m.xn----tfot873s; [V6]; [V6] ++B; 箃ⴡ-󠁝。=\u0338-🤖; [P1 V6]; [P1 V6] ++B; 箃ⴡ-󠁝。≠-🤖; [P1 V6]; [P1 V6] ++B; \u07E5.\u06B5; ; xn--dtb.xn--okb # ߥ.ڵ ++B; xn--dtb.xn--okb; \u07E5.\u06B5; xn--dtb.xn--okb # ߥ.ڵ ++T; \u200C\u200D.𞤿; [B1 C1 C2]; [A4_2] # .𞤿 ++N; \u200C\u200D.𞤿; [B1 C1 C2]; [B1 C1 C2] # .𞤿 ++T; \u200C\u200D.𞤝; [B1 C1 C2]; [A4_2] # .𞤿 ++N; \u200C\u200D.𞤝; [B1 C1 C2]; [B1 C1 C2] # .𞤿 ++B; .xn--3e6h; [A4_2]; [A4_2] ++B; xn--0ugc.xn--3e6h; [B1 C1 C2]; [B1 C1 C2] # .𞤿 ++B; xn--3e6h; 𞤿; xn--3e6h ++B; 𞤿; ; xn--3e6h ++B; 𞤝; 𞤿; xn--3e6h ++T; 🜑𐹧\u0639.ς𑍍蜹; [B1]; [B1] # 🜑𐹧ع.ς𑍍蜹 ++N; 🜑𐹧\u0639.ς𑍍蜹; [B1]; [B1] # 🜑𐹧ع.ς𑍍蜹 ++B; 🜑𐹧\u0639.Σ𑍍蜹; [B1]; [B1] # 🜑𐹧ع.σ𑍍蜹 ++B; 🜑𐹧\u0639.σ𑍍蜹; [B1]; [B1] # 🜑𐹧ع.σ𑍍蜹 ++B; xn--4gb3736kk4zf.xn--4xa2248dy27d; [B1]; [B1] # 🜑𐹧ع.σ𑍍蜹 ++B; xn--4gb3736kk4zf.xn--3xa4248dy27d; [B1]; [B1] # 🜑𐹧ع.ς𑍍蜹 ++B; 򫠐ス􆟤\u0669.󚃟; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ス٩. ++B; 򫠐ス􆟤\u0669.󚃟; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ス٩. ++B; xn--iib777sp230oo708a.xn--7824e; [B5 B6 V6]; [B5 B6 V6] # ス٩. 
++B; 𝪣򕡝.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ ++B; 𝪣򕡝.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ ++B; 𝪣򕡝.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ ++B; 𝪣򕡝.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ ++B; xn--8c3hu7971a.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ ++B; xn--8c3hu7971a.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ ++B; XN--8C3HU7971A.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ ++B; XN--8C3HU7971A.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ ++B; Xn--8C3hu7971a.\u059A\uD850\u06C2; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ ++B; Xn--8C3hu7971a.\u059A\uD850\u06C1\u0654; [B1 P1 V5 V6]; [B1 P1 V5 V6 A3] # 𝪣.֚ۂ ++T; \u0660򪓵\u200C。\u0757; [B1 C1 P1 V6]; [B1 P1 V6] # ٠.ݗ ++N; \u0660򪓵\u200C。\u0757; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ٠.ݗ ++B; xn--8hb82030l.xn--bpb; [B1 V6]; [B1 V6] # ٠.ݗ ++B; xn--8hb852ke991q.xn--bpb; [B1 C1 V6]; [B1 C1 V6] # ٠.ݗ ++T; \u103A\u200D\u200C。-\u200C; [C1 V3 V5]; [V3 V5] # ်.- ++N; \u103A\u200D\u200C。-\u200C; [C1 V3 V5]; [C1 V3 V5] # ်.- ++B; xn--bkd.-; [V3 V5]; [V3 V5] # ်.- ++B; xn--bkd412fca.xn----sgn; [C1 V3 V5]; [C1 V3 V5] # ်.- ++B; ︒。\u1B44ᡉ; [P1 V5 V6]; [P1 V5 V6] # ︒.᭄ᡉ ++B; 。。\u1B44ᡉ; [V5 A4_2]; [V5 A4_2] # ..᭄ᡉ ++B; ..xn--87e93m; [V5 A4_2]; [V5 A4_2] # ..᭄ᡉ ++B; xn--y86c.xn--87e93m; [V5 V6]; [V5 V6] # ︒.᭄ᡉ ++T; \u0758ß。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 ++N; \u0758ß。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 ++T; \u0758ß。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 ++N; \u0758ß。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 ++B; \u0758SS。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 ++B; \u0758ss。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 ++B; \u0758Ss。ጫᢊ\u07682; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 ++B; xn--ss-gke.xn--2-b5c641gfmf; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 ++B; xn--zca724a.xn--2-b5c641gfmf; [B2 B3 B5]; [B2 B3 B5] # ݘß.ጫᢊݨ2 ++B; \u0758SS。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] 
# ݘss.ጫᢊݨ2 ++B; \u0758ss。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 ++B; \u0758Ss。ጫᢊ\u0768𝟐; [B2 B3 B5]; [B2 B3 B5] # ݘss.ጫᢊݨ2 ++B; \u07C3𞶇ᚲ.\u0902\u0353𝟚\u09CD; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ߃ᚲ.ं͓2্ ++B; \u07C3𞶇ᚲ.\u0902\u03532\u09CD; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ߃ᚲ.ं͓2্ ++B; xn--esb067enh07a.xn--2-lgb874bjxa; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ߃ᚲ.ं͓2্ ++T; -\u1BAB︒\u200D.񒶈񥹓; [C2 P1 V3 V6]; [P1 V3 V6] # -᮫︒. ++N; -\u1BAB︒\u200D.񒶈񥹓; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -᮫︒. ++T; -\u1BAB。\u200D.񒶈񥹓; [C2 P1 V3 V6]; [P1 V3 V6 A4_2] # -᮫.. ++N; -\u1BAB。\u200D.񒶈񥹓; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -᮫.. ++B; xn----qml..xn--x50zy803a; [V3 V6 A4_2]; [V3 V6 A4_2] # -᮫.. ++B; xn----qml.xn--1ug.xn--x50zy803a; [C2 V3 V6]; [C2 V3 V6] # -᮫.. ++B; xn----qml1407i.xn--x50zy803a; [V3 V6]; [V3 V6] # -᮫︒. ++B; xn----qmlv7tw180a.xn--x50zy803a; [C2 V3 V6]; [C2 V3 V6] # -᮫︒. ++B; 󠦮.≯𞀆; [P1 V6]; [P1 V6] ++B; 󠦮.>\u0338𞀆; [P1 V6]; [P1 V6] ++B; xn--t546e.xn--hdh5166o; [V6]; [V6] ++B; -𑄳󠊗𐹩。𞮱; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; xn----p26i72em2894c.xn--zw6h; [B1 V3 V6]; [B1 V3 V6] ++B; \u06B9.ᡳ\u115F; [P1 V6]; [P1 V6] # ڹ.ᡳ ++B; \u06B9.ᡳ\u115F; [P1 V6]; [P1 V6] # ڹ.ᡳ ++B; xn--skb.xn--osd737a; [V6]; [V6] # ڹ.ᡳ ++B; 㨛𘱎.︒𝟕\u0D01; [P1 V6]; [P1 V6] # 㨛.︒7ഁ ++B; 㨛𘱎.。7\u0D01; [P1 V6 A4_2]; [P1 V6 A4_2] # 㨛..7ഁ ++B; xn--mbm8237g..xn--7-7hf; [V6 A4_2]; [V6 A4_2] # 㨛..7ഁ ++B; xn--mbm8237g.xn--7-7hf1526p; [V6]; [V6] # 㨛.︒7ഁ ++B; \u06DD𻱧-。𞷁\u2064𞤣≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ ++B; \u06DD𻱧-。𞷁\u2064𞤣<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ ++B; \u06DD𻱧-。𞷁\u2064𞤣≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ ++B; \u06DD𻱧-。𞷁\u2064𞤣<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ ++B; \u06DD𻱧-。𞷁\u2064𞤁<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ ++B; \u06DD𻱧-。𞷁\u2064𞤁≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ ++B; xn----dxc06304e.xn--gdh5020pk5c; [B1 B3 V3 V6]; [B1 B3 V3 V6] # -.𞤣≮ ++B; \u06DD𻱧-。𞷁\u2064𞤁<\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ ++B; 
\u06DD𻱧-。𞷁\u2064𞤁≮; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # -.𞤣≮ ++T; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ß꫶ᢥ.⊶ჁႶ ++N; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ß꫶ᢥ.⊶ჁႶ ++T; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ß꫶ᢥ.⊶ჁႶ ++N; ß\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ß꫶ᢥ.⊶ჁႶ ++T; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ß꫶ᢥ.⊶ⴡⴖ ++N; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ß꫶ᢥ.⊶ⴡⴖ ++T; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶ჁႶ ++N; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶ჁႶ ++T; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ss꫶ᢥ.⊶ⴡⴖ ++N; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ss꫶ᢥ.⊶ⴡⴖ ++T; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ ++N; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ ++B; xn--ss-4epx629f.xn--5nd703gyrh; [V6]; [V6] # ss꫶ᢥ.⊶Ⴡⴖ ++B; xn--ss-4ep585bkm5p.xn--5nd703gyrh; [C1 V6]; [C1 V6] # ss꫶ᢥ.⊶Ⴡⴖ ++B; xn--ss-4epx629f.xn--ifh802b6a; ss\uAAF6ᢥ.⊶ⴡⴖ; xn--ss-4epx629f.xn--ifh802b6a; NV8 # ss꫶ᢥ.⊶ⴡⴖ ++B; ss\uAAF6ᢥ.⊶ⴡⴖ; ; xn--ss-4epx629f.xn--ifh802b6a; NV8 # ss꫶ᢥ.⊶ⴡⴖ ++B; SS\uAAF6ᢥ.⊶ჁႶ; [P1 V6]; [P1 V6] # ss꫶ᢥ.⊶ჁႶ ++B; Ss\uAAF6ᢥ.⊶Ⴡⴖ; [P1 V6]; [P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ ++B; xn--ss-4epx629f.xn--undv409k; [V6]; [V6] # ss꫶ᢥ.⊶ჁႶ ++B; xn--ss-4ep585bkm5p.xn--ifh802b6a; [C1]; [C1] # ss꫶ᢥ.⊶ⴡⴖ ++B; xn--ss-4ep585bkm5p.xn--undv409k; [C1 V6]; [C1 V6] # ss꫶ᢥ.⊶ჁႶ ++B; xn--zca682johfi89m.xn--ifh802b6a; [C1]; [C1] # ß꫶ᢥ.⊶ⴡⴖ ++B; xn--zca682johfi89m.xn--undv409k; [C1 V6]; [C1 V6] # ß꫶ᢥ.⊶ჁႶ ++T; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ß꫶ᢥ.⊶ⴡⴖ ++N; ß\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ß꫶ᢥ.⊶ⴡⴖ ++T; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶ჁႶ ++N; SS\u200C\uAAF6ᢥ.⊶ჁႶ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶ჁႶ ++T; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; xn--ss-4epx629f.xn--ifh802b6a # ss꫶ᢥ.⊶ⴡⴖ ++N; ss\u200C\uAAF6ᢥ.⊶ⴡⴖ; [C1]; [C1] # ss꫶ᢥ.⊶ⴡⴖ ++T; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ ++N; Ss\u200C\uAAF6ᢥ.⊶Ⴡⴖ; [C1 P1 V6]; [C1 P1 V6] # ss꫶ᢥ.⊶Ⴡⴖ ++T; \u200D。ς󠁉; [C2 P1 V6]; [P1 V6 A4_2] # .ς ++N; \u200D。ς󠁉; [C2 P1 
V6]; [C2 P1 V6] # .ς ++T; \u200D。Σ󠁉; [C2 P1 V6]; [P1 V6 A4_2] # .σ ++N; \u200D。Σ󠁉; [C2 P1 V6]; [C2 P1 V6] # .σ ++T; \u200D。σ󠁉; [C2 P1 V6]; [P1 V6 A4_2] # .σ ++N; \u200D。σ󠁉; [C2 P1 V6]; [C2 P1 V6] # .σ ++B; .xn--4xa24344p; [V6 A4_2]; [V6 A4_2] ++B; xn--1ug.xn--4xa24344p; [C2 V6]; [C2 V6] # .σ ++B; xn--1ug.xn--3xa44344p; [C2 V6]; [C2 V6] # .ς ++T; 𞵑ß.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ß.ݑ𞤽- ++N; 𞵑ß.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ß.ݑ𞤽- ++T; 𞵑ß.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ß.ݑ𞤽- ++N; 𞵑ß.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ß.ݑ𞤽- ++T; 𞵑SS.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- ++N; 𞵑SS.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- ++T; 𞵑ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- ++N; 𞵑ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- ++T; 𞵑Ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- ++N; 𞵑Ss.\u0751\u200D𞤽-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- ++B; xn--ss-2722a.xn----z3c03218a; [B2 B3 V3 V6]; [B2 B3 V3 V6] # ss.ݑ𞤽- ++B; xn--ss-2722a.xn----z3c011q9513b; [B2 B3 C2 V3 V6]; [B2 B3 C2 V3 V6] # ss.ݑ𞤽- ++B; xn--zca5423w.xn----z3c011q9513b; [B2 B3 C2 V3 V6]; [B2 B3 C2 V3 V6] # ß.ݑ𞤽- ++T; 𞵑ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- ++N; 𞵑ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- ++T; 𞵑Ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 P1 V3 V6] # ss.ݑ𞤽- ++N; 𞵑Ss.\u0751\u200D𞤛-; [B2 B3 C2 P1 V3 V6]; [B2 B3 C2 P1 V3 V6] # ss.ݑ𞤽- ++T; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- ++N; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- ++T; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- ++N; 𑘽\u200D𞤧.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- ++T; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- ++N; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- 
++B; xn--qb2ds317a.xn----k26iq1483f; [B1 V3 V5 V6]; [B1 V3 V5 V6] ++B; xn--1ugz808gdimf.xn----k26iq1483f; [B1 C2 V3 V5 V6]; [B1 C2 V3 V5 V6] # 𑘽𞤧.𐹧- ++T; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑘽𞤧.𐹧- ++N; 𑘽\u200D𞤅.𐹧󡦪-; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # 𑘽𞤧.𐹧- ++B; ⒒򨘙򳳠𑓀.-󞡊; [P1 V3 V6]; [P1 V3 V6] ++B; 11.򨘙򳳠𑓀.-󞡊; [P1 V3 V6]; [P1 V3 V6] ++B; 11.xn--uz1d59632bxujd.xn----x310m; [V3 V6]; [V3 V6] ++B; xn--3shy698frsu9dt1me.xn----x310m; [V3 V6]; [V3 V6] ++T; -。\u200D; [C2 V3]; [V3] # -. ++N; -。\u200D; [C2 V3]; [C2 V3] # -. ++T; -。\u200D; [C2 V3]; [V3] # -. ++N; -。\u200D; [C2 V3]; [C2 V3] # -. ++B; -.; [V3]; [V3] ++B; -.xn--1ug; [C2 V3]; [C2 V3] # -. ++T; ≮ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++N; ≮ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++T; <\u0338ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++N; <\u0338ᡬ.ς¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++T; ≮ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++N; ≮ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++T; <\u0338ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++N; <\u0338ᡬ.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++B; <\u0338ᡬ.Σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- ++B; ≮ᡬ.Σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- ++B; ≮ᡬ.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- ++B; <\u0338ᡬ.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- ++B; xn--88e732c.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- ++B; XN--88E732C.Σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- ++T; xn--88e732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++N; xn--88e732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++T; Xn--88E732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++N; Xn--88E732c.ς1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.ς1- ++B; Xn--88E732c.σ1-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- ++B; <\u0338ᡬ.Σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- ++B; ≮ᡬ.Σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- ++B; ≮ᡬ.σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- ++B; <\u0338ᡬ.σ¹-\uDB09; [P1 V6]; [P1 V6 A3] # ≮ᡬ.σ1- ++B; ቬ򔠼񁗶。𐨬𝟠; [P1 V6]; [P1 V6] ++B; ቬ򔠼񁗶。𐨬8; [P1 V6]; [P1 V6] ++B; xn--d0d41273c887z.xn--8-ob5i; [V6]; [V6] ++B; 
𐱲。蔫\u0766; [B5 B6 P1 V6]; [B5 B6 P1 V6] # .蔫ݦ ++B; xn--389c.xn--qpb7055d; [B5 B6 V6]; [B5 B6 V6] # .蔫ݦ ++B; 򒲧₃。ꡚ𛇑󠄳\u0647; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 3.ꡚ𛇑ه ++B; 򒲧3。ꡚ𛇑󠄳\u0647; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 3.ꡚ𛇑ه ++B; xn--3-ep59g.xn--jhb5904fcp0h; [B5 B6 V6]; [B5 B6 V6] # 3.ꡚ𛇑ه ++T; 蓸\u0642≠.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß ++N; 蓸\u0642≠.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß ++T; 蓸\u0642=\u0338.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß ++N; 蓸\u0642=\u0338.ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ß ++B; 蓸\u0642=\u0338.SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss ++B; 蓸\u0642≠.SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss ++B; 蓸\u0642≠.ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss ++B; 蓸\u0642=\u0338.ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss ++B; 蓸\u0642=\u0338.Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss ++B; 蓸\u0642≠.Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 蓸ق≠.ss ++B; xn--ehb015lnt1e.ss; [B5 B6 V6]; [B5 B6 V6] # 蓸ق≠.ss ++B; xn--ehb015lnt1e.xn--zca; [B5 B6 V6]; [B5 B6 V6] # 蓸ق≠.ß ++T; \u084E\u067A\u0DD3⒊.𐹹𞱩󠃪\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # ࡎٺී⒊.𐹹 ++N; \u084E\u067A\u0DD3⒊.𐹹𞱩󠃪\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ࡎٺී⒊.𐹹 ++T; \u084E\u067A\u0DD33..𐹹𞱩󠃪\u200C; [B1 C1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡎٺී3..𐹹 ++N; \u084E\u067A\u0DD33..𐹹𞱩󠃪\u200C; [B1 C1 P1 V6 A4_2]; [B1 C1 P1 V6 A4_2] # ࡎٺී3..𐹹 ++B; xn--3-prc71ls9j..xn--xo0dw109an237f; [B1 V6 A4_2]; [B1 V6 A4_2] # ࡎٺී3..𐹹 ++B; xn--3-prc71ls9j..xn--0ug3205g7eyf3c96h; [B1 C1 V6 A4_2]; [B1 C1 V6 A4_2] # ࡎٺී3..𐹹 ++B; xn--zib94gfziuq1a.xn--xo0dw109an237f; [B1 V6]; [B1 V6] # ࡎٺී⒊.𐹹 ++B; xn--zib94gfziuq1a.xn--0ug3205g7eyf3c96h; [B1 C1 V6]; [B1 C1 V6] # ࡎٺී⒊.𐹹 ++T; ς\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [P1 V3 V6] # ς-.Ⴣ𦟙 ++N; ς\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ς-.Ⴣ𦟙 ++T; ς\u200D-.ⴣ𦟙; [C2 V3]; [V3] # ς-.ⴣ𦟙 ++N; ς\u200D-.ⴣ𦟙; [C2 V3]; [C2 V3] # ς-.ⴣ𦟙 ++T; Σ\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [P1 V3 V6] # σ-.Ⴣ𦟙 ++N; Σ\u200D-.Ⴣ𦟙; [C2 P1 V3 V6]; [C2 P1 V3 V6] # σ-.Ⴣ𦟙 ++T; σ\u200D-.ⴣ𦟙; [C2 V3]; [V3] # σ-.ⴣ𦟙 ++N; σ\u200D-.ⴣ𦟙; [C2 V3]; [C2 V3] # σ-.ⴣ𦟙 ++B; 
xn----zmb.xn--rlj2573p; [V3]; [V3] ++B; xn----zmb048s.xn--rlj2573p; [C2 V3]; [C2 V3] # σ-.ⴣ𦟙 ++B; xn----zmb.xn--7nd64871a; [V3 V6]; [V3 V6] ++B; xn----zmb048s.xn--7nd64871a; [C2 V3 V6]; [C2 V3 V6] # σ-.Ⴣ𦟙 ++B; xn----xmb348s.xn--rlj2573p; [C2 V3]; [C2 V3] # ς-.ⴣ𦟙 ++B; xn----xmb348s.xn--7nd64871a; [C2 V3 V6]; [C2 V3 V6] # ς-.Ⴣ𦟙 ++B; ≠。🞳𝟲; [P1 V6]; [P1 V6] ++B; =\u0338。🞳𝟲; [P1 V6]; [P1 V6] ++B; ≠。🞳6; [P1 V6]; [P1 V6] ++B; =\u0338。🞳6; [P1 V6]; [P1 V6] ++B; xn--1ch.xn--6-dl4s; [V6]; [V6] ++B; 󅬽.蠔; [P1 V6]; [P1 V6] ++B; xn--g747d.xn--xl2a; [V6]; [V6] ++T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 ++N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 ++T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 ++N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 ++T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 ++N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 ++T; \u08E6\u200D.뼽; [C2 V5]; [V5] # ࣦ.뼽 ++N; \u08E6\u200D.뼽; [C2 V5]; [C2 V5] # ࣦ.뼽 ++B; xn--p0b.xn--e43b; [V5]; [V5] # ࣦ.뼽 ++B; xn--p0b869i.xn--e43b; [C2 V5]; [C2 V5] # ࣦ.뼽 ++B; ₇\u0BCD􃂷\u06D2。👖\u0675-𞪑; [B1 P1 V6]; [B1 P1 V6] # 7்ے.👖اٴ- ++B; 7\u0BCD􃂷\u06D2。👖\u0627\u0674-𞪑; [B1 P1 V6]; [B1 P1 V6] # 7்ے.👖اٴ- ++B; xn--7-rwc839aj3073c.xn----ymc5uv818oghka; [B1 V6]; [B1 V6] # 7்ے.👖اٴ- ++B; -。\u077B; [B1 V3]; [B1 V3] # -.ݻ ++B; -。\u077B; [B1 V3]; [B1 V3] # -.ݻ ++B; -.xn--cqb; [B1 V3]; [B1 V3] # -.ݻ ++B; 𑇌𵛓。-⒈ꡏ\u072B; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 𑇌.-⒈ꡏܫ ++B; 𑇌𵛓。-1.ꡏ\u072B; [B1 B5 B6 P1 V3 V5 V6]; [B1 B5 B6 P1 V3 V5 V6] # 𑇌.-1.ꡏܫ ++B; xn--8d1dg030h.-1.xn--1nb7163f; [B1 B5 B6 V3 V5 V6]; [B1 B5 B6 V3 V5 V6] # 𑇌.-1.ꡏܫ ++B; xn--8d1dg030h.xn----u1c466tp10j; [B1 V3 V5 V6]; [B1 V3 V5 V6] # 𑇌.-⒈ꡏܫ ++B; 璛\u1734\u06AF.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 璛᜴گ.- ++B; xn--ikb175frt4e.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 璛᜴گ.- ++B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 ++B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 ++B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 ++B; 󠆰\u08A1\u0A4D샕.𐹲휁; [B1 B2 B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 ++B; xn--qyb07fj857a.xn--728bv72h; [B1 B2 
B3]; [B1 B2 B3] # ࢡ੍샕.𐹲휁 ++B; 񍨽.񋸕; [P1 V6]; [P1 V6] ++B; 񍨽.񋸕; [P1 V6]; [P1 V6] ++B; xn--pr3x.xn--rv7w; [V6]; [V6] ++B; \u067D𞥕。𑑂𞤶Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- ++B; \u067D𞥕。𑑂𞤶Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- ++B; \u067D𞥕。𑑂𞤶ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- ++B; \u067D𞥕。𑑂𞤔Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- ++B; \u067D𞥕。𑑂𞤔ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- ++B; xn--2ib0338v.xn----zvs0199fo91g; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- ++B; xn--2ib0338v.xn----w0g2740ro9vg; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- ++B; \u067D𞥕。𑑂𞤶ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- ++B; \u067D𞥕。𑑂𞤔Ⴍ-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ٽ𞥕.𑑂𞤶Ⴍ- ++B; \u067D𞥕。𑑂𞤔ⴍ-; [B1 V3 V5]; [B1 V3 V5] # ٽ𞥕.𑑂𞤶ⴍ- ++B; 𐯀𐸉𞧏。񢚧₄Ⴋ񂹫; [P1 V6]; [P1 V6] ++B; 𐯀𐸉𞧏。񢚧4Ⴋ񂹫; [P1 V6]; [P1 V6] ++B; 𐯀𐸉𞧏。񢚧4ⴋ񂹫; [P1 V6]; [P1 V6] ++B; xn--039c42bq865a.xn--4-wvs27840bnrzm; [V6]; [V6] ++B; xn--039c42bq865a.xn--4-t0g49302fnrzm; [V6]; [V6] ++B; 𐯀𐸉𞧏。񢚧₄ⴋ񂹫; [P1 V6]; [P1 V6] ++B; 4\u06BD︒󠑥.≠; [B1 P1 V6]; [B1 P1 V6] # 4ڽ︒.≠ ++B; 4\u06BD︒󠑥.=\u0338; [B1 P1 V6]; [B1 P1 V6] # 4ڽ︒.≠ ++B; 4\u06BD。󠑥.≠; [B1 P1 V6]; [B1 P1 V6] # 4ڽ..≠ ++B; 4\u06BD。󠑥.=\u0338; [B1 P1 V6]; [B1 P1 V6] # 4ڽ..≠ ++B; xn--4-kvc.xn--5136e.xn--1ch; [B1 V6]; [B1 V6] # 4ڽ..≠ ++B; xn--4-kvc5601q2h50i.xn--1ch; [B1 V6]; [B1 V6] # 4ڽ︒.≠ ++B; 𝟓。\u06D7; [V5]; [V5] # 5.ۗ ++B; 5。\u06D7; [V5]; [V5] # 5.ۗ ++B; 5.xn--nlb; [V5]; [V5] # 5.ۗ ++T; \u200C򺸩.⾕; [C1 P1 V6]; [P1 V6] # .谷 ++N; \u200C򺸩.⾕; [C1 P1 V6]; [C1 P1 V6] # .谷 ++T; \u200C򺸩.谷; [C1 P1 V6]; [P1 V6] # .谷 ++N; \u200C򺸩.谷; [C1 P1 V6]; [C1 P1 V6] # .谷 ++B; xn--i183d.xn--6g3a; [V6]; [V6] ++B; xn--0ug26167i.xn--6g3a; [C1 V6]; [C1 V6] # .谷 ++T; ︒󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6]; [P1 V3 V6] # ︒.-ܼ ++N; ︒󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6]; [C1 C2 P1 V3 V6] # ︒.-ܼ ++T; 。󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] # ..-ܼ ++N; 。󎰇\u200D.-\u073C\u200C; [C1 C2 P1 V3 V6 A4_2]; [C1 C2 P1 V3 V6 A4_2] # ..-ܼ ++B; .xn--hh50e.xn----t2c; [V3 V6 A4_2]; [V3 V6 A4_2] # ..-ܼ ++B; 
.xn--1ug05310k.xn----t2c071q; [C1 C2 V3 V6 A4_2]; [C1 C2 V3 V6 A4_2] # ..-ܼ ++B; xn--y86c71305c.xn----t2c; [V3 V6]; [V3 V6] # ︒.-ܼ ++B; xn--1ug1658ftw26f.xn----t2c071q; [C1 C2 V3 V6]; [C1 C2 V3 V6] # ︒.-ܼ ++B; ≯𞤟。ᡨ; [B1 P1 V6]; [B1 P1 V6] ++B; >\u0338𞤟。ᡨ; [B1 P1 V6]; [B1 P1 V6] ++B; >\u0338𞥁。ᡨ; [B1 P1 V6]; [B1 P1 V6] ++B; ≯𞥁。ᡨ; [B1 P1 V6]; [B1 P1 V6] ++B; xn--hdhz520p.xn--48e; [B1 V6]; [B1 V6] ++B; \u0F74𫫰𝨄。\u0713𐹦; [B1 V5]; [B1 V5] # ུ𫫰𝨄.ܓ𐹦 ++B; xn--ned8985uo92e.xn--dnb6395k; [B1 V5]; [B1 V5] # ུ𫫰𝨄.ܓ𐹦 ++B; \u033C\u07DB⁷𝟹。𝟬; [B1 V5]; [B1 V5] # ̼ߛ73.0 ++B; \u033C\u07DB73。0; [B1 V5]; [B1 V5] # ̼ߛ73.0 ++B; xn--73-9yb648b.0; [B1 V5]; [B1 V5] # ̼ߛ73.0 ++T; \u200D.𝟗; [C2]; [A4_2] # .9 ++N; \u200D.𝟗; [C2]; [C2] # .9 ++T; \u200D.9; [C2]; [A4_2] # .9 ++N; \u200D.9; [C2]; [C2] # .9 ++B; .9; [A4_2]; [A4_2] ++B; xn--1ug.9; [C2]; [C2] # .9 ++B; 9; ; ++B; \u0779ᡭ𪕈。\u06B6\u08D9; [B2 B3]; [B2 B3] # ݹᡭ𪕈.ڶࣙ ++B; xn--9pb497fs270c.xn--pkb80i; [B2 B3]; [B2 B3] # ݹᡭ𪕈.ڶࣙ ++B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ ++B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ ++B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ ++B; \u07265\u07E2겙。\u1CF4𐷚; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ܦ5ߢ겙.᳴ ++B; xn--5-j1c97c2483c.xn--e7f2093h; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ܦ5ߢ겙.᳴ ++T; Ⴍ𿣍ꡨ\u05AE。Ⴞ\u200C\u200C; [C1 P1 V6]; [P1 V6] # Ⴍꡨ֮.Ⴞ ++N; Ⴍ𿣍ꡨ\u05AE。Ⴞ\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴍꡨ֮.Ⴞ ++T; ⴍ𿣍ꡨ\u05AE。ⴞ\u200C\u200C; [C1 P1 V6]; [P1 V6] # ⴍꡨ֮.ⴞ ++N; ⴍ𿣍ꡨ\u05AE。ⴞ\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴍꡨ֮.ⴞ ++B; xn--5cb172r175fug38a.xn--mlj; [V6]; [V6] # ⴍꡨ֮.ⴞ ++B; xn--5cb172r175fug38a.xn--0uga051h; [C1 V6]; [C1 V6] # ⴍꡨ֮.ⴞ ++B; xn--5cb347co96jug15a.xn--2nd; [V6]; [V6] # Ⴍꡨ֮.Ⴞ ++B; xn--5cb347co96jug15a.xn--2nd059ea; [C1 V6]; [C1 V6] # Ⴍꡨ֮.Ⴞ ++B; 𐋰。󑓱; [P1 V6]; [P1 V6] ++B; xn--k97c.xn--q031e; [V6]; [V6] ++B; 󡎦\u17B4\u0B4D.𐹾; [B1 P1 V6]; [B1 P1 V6] # ୍.𐹾 ++B; xn--9ic364dho91z.xn--2o0d; [B1 V6]; [B1 V6] 
# ୍.𐹾 ++B; \u08DFႫ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ ++B; \u08DFႫ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ ++B; \u08DFႫ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ ++B; \u08DFႫ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟႫ귤.0휪ૣ ++B; \u08DFⴋ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ ++B; \u08DFⴋ𶿸귤.򠅼0휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ ++B; xn--i0b436pkl2g2h42a.xn--0-8le8997mulr5f; [V5 V6]; [V5 V6] # ࣟⴋ귤.0휪ૣ ++B; xn--i0b601b6r7l2hs0a.xn--0-8le8997mulr5f; [V5 V6]; [V5 V6] # ࣟႫ귤.0휪ૣ ++B; \u08DFⴋ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ ++B; \u08DFⴋ𶿸귤.򠅼𝟢휪\u0AE3; [P1 V5 V6]; [P1 V5 V6] # ࣟⴋ귤.0휪ૣ ++B; \u0784.𞡝\u0601; [P1 V6]; [P1 V6] # ބ.𞡝 ++B; \u0784.𞡝\u0601; [P1 V6]; [P1 V6] # ބ.𞡝 ++B; xn--lqb.xn--jfb1808v; [V6]; [V6] # ބ.𞡝 ++T; \u0ACD₃.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 ++N; \u0ACD₃.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 ++T; \u0ACD3.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 ++N; \u0ACD3.8\uA8C4\u200D🃤; [V5]; [V5] # ્3.8꣄🃤 ++B; xn--3-yke.xn--8-sl4et308f; [V5]; [V5] # ્3.8꣄🃤 ++B; xn--3-yke.xn--8-ugnv982dbkwm; [V5]; [V5] # ્3.8꣄🃤 ++B; ℻⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] ++B; FAX⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] ++B; fax⩷𝆆。𞥂󠆁\u180C; [B6]; [B6] ++B; Fax⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] ++B; xn--fax-4c9a1676t.xn--6e6h; [B6]; [B6] ++B; ℻⩷𝆆。𞥂󠆁\u180C; [B6]; [B6] ++B; FAX⩷𝆆。𞥂󠆁\u180C; [B6]; [B6] ++B; fax⩷𝆆。𞤠󠆁\u180C; [B6]; [B6] ++B; fax⩷𝆆.𞥂; [B6]; [B6] ++B; FAX⩷𝆆.𞤠; [B6]; [B6] ++B; Fax⩷𝆆.𞤠; [B6]; [B6] ++B; FAX⩷𝆆.𞥂; [B6]; [B6] ++B; Fax⩷𝆆.𞥂; [B6]; [B6] ++B; ꡕ≠\u105E󮿱。𐵧󠄫\uFFA0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. ++B; ꡕ=\u0338\u105E󮿱。𐵧󠄫\uFFA0; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. ++B; ꡕ≠\u105E󮿱。𐵧󠄫\u1160; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. ++B; ꡕ=\u0338\u105E󮿱。𐵧󠄫\u1160; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ꡕ≠ၞ. ++B; xn--cld333gn31h0158l.xn--psd1510k; [B2 B3 V6]; [B2 B3 V6] # ꡕ≠ၞ. ++B; xn--cld333gn31h0158l.xn--cl7c96v; [B2 B3 V6]; [B2 B3 V6] # ꡕ≠ၞ. ++T; 鱊。\u200C; [C1]; xn--rt6a. # 鱊. ++N; 鱊。\u200C; [C1]; [C1] # 鱊. ++B; xn--rt6a.; 鱊.; xn--rt6a. ++B; 鱊.; ; xn--rt6a. 
++B; xn--rt6a.xn--0ug; [C1]; [C1] # 鱊. ++B; 8𐹣.𑍨; [B1 B3 B6 V5]; [B1 B3 B6 V5] ++B; 8𐹣.𑍨; [B1 B3 B6 V5]; [B1 B3 B6 V5] ++B; xn--8-d26i.xn--0p1d; [B1 B3 B6 V5]; [B1 B3 B6 V5] ++B; ⏹𐧀.𐫯; [B1]; [B1] ++B; ⏹𐧀.𐫯; [B1]; [B1] ++B; xn--qoh9161g.xn--1x9c; [B1]; [B1] ++T; 𞤺\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. ++N; 𞤺\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. ++T; 𞤺\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. ++N; 𞤺\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. ++T; 𞤘\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. ++N; 𞤘\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. ++B; xn--4-0bd15808a.; 𞤺\u07CC4.; xn--4-0bd15808a. # 𞤺ߌ4. ++B; 𞤺\u07CC4.; ; xn--4-0bd15808a. # 𞤺ߌ4. ++B; 𞤘\u07CC4.; 𞤺\u07CC4.; xn--4-0bd15808a. # 𞤺ߌ4. ++B; xn--4-0bd15808a.xn--1ug; [B1 C2]; [B1 C2] # 𞤺ߌ4. ++T; 𞤘\u07CC4.\u200D; [B1 C2]; xn--4-0bd15808a. # 𞤺ߌ4. ++N; 𞤘\u07CC4.\u200D; [B1 C2]; [B1 C2] # 𞤺ߌ4. ++B; ⒗\u0981\u20EF-.\u08E2•; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ⒗ঁ⃯-.• ++B; 16.\u0981\u20EF-.\u08E2•; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # 16.ঁ⃯-.• ++B; 16.xn----z0d801p.xn--l0b810j; [B1 V3 V5 V6]; [B1 V3 V5 V6] # 16.ঁ⃯-.• ++B; xn----z0d801p6kd.xn--l0b810j; [B1 V3 V6]; [B1 V3 V6] # ⒗ঁ⃯-.• ++B; -。䏛; [V3]; [V3] ++B; -。䏛; [V3]; [V3] ++B; -.xn--xco; [V3]; [V3] ++T; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [P1 V6] # . ++N; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [C1 C2 P1 V6] # . ++T; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [P1 V6] # . ++N; \u200C񒃠.\u200D; [C1 C2 P1 V6]; [C1 C2 P1 V6] # . ++B; xn--dj8y.; [V6]; [V6] ++B; xn--0ugz7551c.xn--1ug; [C1 C2 V6]; [C1 C2 V6] # . 
++T; ⒈⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # ⒈⓰.𐹠Ⴕ ++N; ⒈⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⒈⓰.𐹠Ⴕ ++T; 1.⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # 1.⓰.𐹠Ⴕ ++N; 1.⓰󥣇。𐹠\u200D򗷦Ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 1.⓰.𐹠Ⴕ ++T; 1.⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # 1.⓰.𐹠ⴕ ++N; 1.⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 1.⓰.𐹠ⴕ ++B; 1.xn--svh00804k.xn--dljv223ee5t2d; [B1 V6]; [B1 V6] ++B; 1.xn--svh00804k.xn--1ug352csp0psg45e; [B1 C2 V6]; [B1 C2 V6] # 1.⓰.𐹠ⴕ ++B; 1.xn--svh00804k.xn--tnd1990ke579c; [B1 V6]; [B1 V6] ++B; 1.xn--svh00804k.xn--tnd969erj4psgl3e; [B1 C2 V6]; [B1 C2 V6] # 1.⓰.𐹠Ⴕ ++T; ⒈⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 P1 V6] # ⒈⓰.𐹠ⴕ ++N; ⒈⓰󥣇。𐹠\u200D򗷦ⴕ; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ⒈⓰.𐹠ⴕ ++B; xn--tsh0nz9380h.xn--dljv223ee5t2d; [B1 V6]; [B1 V6] ++B; xn--tsh0nz9380h.xn--1ug352csp0psg45e; [B1 C2 V6]; [B1 C2 V6] # ⒈⓰.𐹠ⴕ ++B; xn--tsh0nz9380h.xn--tnd1990ke579c; [B1 V6]; [B1 V6] ++B; xn--tsh0nz9380h.xn--tnd969erj4psgl3e; [B1 C2 V6]; [B1 C2 V6] # ⒈⓰.𐹠Ⴕ ++T; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 ++N; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 ++T; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 ++N; 𞠊ᠮ-ß。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ß.᳐効 ++B; 𞠊ᠮ-SS。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 ++B; 𞠊ᠮ-ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 ++B; 𞠊ᠮ-Ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 ++B; xn---ss-21t18904a.xn--jfb197i791bi6x4c; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # 𞠊ᠮ-ss.᳐効 ++B; xn----qfa310pg973b.xn--jfb197i791bi6x4c; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # 𞠊ᠮ-ß.᳐効 ++B; 𞠊ᠮ-SS。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 ++B; 𞠊ᠮ-ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 ++B; 𞠊ᠮ-Ss。\u1CD0効\u0601𷣭; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # 𞠊ᠮ-ss.᳐効 ++B; 𑇀.󠨱; [P1 V5 V6]; [P1 V5 V6] ++B; xn--wd1d.xn--k946e; 
[V5 V6]; [V5 V6] ++B; ␒3\uFB88。𝟘𐨿𐹆; [B1 P1 V6]; [B1 P1 V6] # ␒3ڈ.0𐨿 ++B; ␒3\u0688。0𐨿𐹆; [B1 P1 V6]; [B1 P1 V6] # ␒3ڈ.0𐨿 ++B; xn--3-jsc897t.xn--0-sc5iy3h; [B1 V6]; [B1 V6] # ␒3ڈ.0𐨿 ++B; \u076B6\u0A81\u08A6。\u1DE3; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ݫ6ઁࢦ.ᷣ ++B; \u076B6\u0A81\u08A6。\u1DE3; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ݫ6ઁࢦ.ᷣ ++B; xn--6-h5c06gj6c.xn--7eg; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ݫ6ઁࢦ.ᷣ ++T; \u0605-𽤞Ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # -Ⴂ. ++N; \u0605-𽤞Ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # -Ⴂ. ++T; \u0605-𽤞ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # -ⴂ. ++N; \u0605-𽤞ⴂ。򅤶\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # -ⴂ. ++B; xn----0kc8501a5399e.xn--ss06b; [B1 V6]; [B1 V6] # -ⴂ. ++B; xn----0kc8501a5399e.xn--1ugy3204f; [B1 B6 C2 V6]; [B1 B6 C2 V6] # -ⴂ. ++B; xn----0kc662fc152h.xn--ss06b; [B1 V6]; [B1 V6] # -Ⴂ. ++B; xn----0kc662fc152h.xn--1ugy3204f; [B1 B6 C2 V6]; [B1 B6 C2 V6] # -Ⴂ. ++T; ⾆.ꡈ5≯ß; [P1 V6]; [P1 V6] ++N; ⾆.ꡈ5≯ß; [P1 V6]; [P1 V6] ++T; ⾆.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] ++N; ⾆.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] ++T; 舌.ꡈ5≯ß; [P1 V6]; [P1 V6] ++N; 舌.ꡈ5≯ß; [P1 V6]; [P1 V6] ++T; 舌.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] ++N; 舌.ꡈ5>\u0338ß; [P1 V6]; [P1 V6] ++B; 舌.ꡈ5>\u0338SS; [P1 V6]; [P1 V6] ++B; 舌.ꡈ5≯SS; [P1 V6]; [P1 V6] ++B; 舌.ꡈ5≯ss; [P1 V6]; [P1 V6] ++B; 舌.ꡈ5>\u0338ss; [P1 V6]; [P1 V6] ++B; 舌.ꡈ5>\u0338Ss; [P1 V6]; [P1 V6] ++B; 舌.ꡈ5≯Ss; [P1 V6]; [P1 V6] ++B; xn--tc1a.xn--5ss-3m2a5009e; [V6]; [V6] ++B; xn--tc1a.xn--5-qfa988w745i; [V6]; [V6] ++B; ⾆.ꡈ5>\u0338SS; [P1 V6]; [P1 V6] ++B; ⾆.ꡈ5≯SS; [P1 V6]; [P1 V6] ++B; ⾆.ꡈ5≯ss; [P1 V6]; [P1 V6] ++B; ⾆.ꡈ5>\u0338ss; [P1 V6]; [P1 V6] ++B; ⾆.ꡈ5>\u0338Ss; [P1 V6]; [P1 V6] ++B; ⾆.ꡈ5≯Ss; [P1 V6]; [P1 V6] ++T; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ્8.ݜ ++N; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ્8.ݜ ++T; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ્8.ݜ ++N; \u0ACD8\u200D.򾂈\u075C; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ્8.ݜ ++B; 
xn--8-yke.xn--gpb79046m; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ્8.ݜ ++B; xn--8-yke534n.xn--gpb79046m; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # ્8.ݜ ++B; 򸷆\u0A70≮򹓙.񞎧⁷󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ ++B; 򸷆\u0A70<\u0338򹓙.񞎧⁷󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ ++B; 򸷆\u0A70≮򹓙.񞎧7󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ ++B; 򸷆\u0A70<\u0338򹓙.񞎧7󠯙\u06B6; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ੰ≮.7ڶ ++B; xn--ycc893jqh38rb6fa.xn--7-5uc53836ixt41c; [B5 B6 V6]; [B5 B6 V6] # ੰ≮.7ڶ ++T; 𞤪.ς; ; xn--ie6h.xn--4xa ++N; 𞤪.ς; ; xn--ie6h.xn--3xa ++B; 𞤈.Σ; 𞤪.σ; xn--ie6h.xn--4xa ++B; 𞤪.σ; ; xn--ie6h.xn--4xa ++B; 𞤈.σ; 𞤪.σ; xn--ie6h.xn--4xa ++B; xn--ie6h.xn--4xa; 𞤪.σ; xn--ie6h.xn--4xa ++T; 𞤈.ς; 𞤪.ς; xn--ie6h.xn--4xa ++N; 𞤈.ς; 𞤪.ς; xn--ie6h.xn--3xa ++B; xn--ie6h.xn--3xa; 𞤪.ς; xn--ie6h.xn--3xa ++B; 𞤪.Σ; 𞤪.σ; xn--ie6h.xn--4xa ++T; \u200CႺ。ς; [C1 P1 V6]; [P1 V6] # Ⴚ.ς ++N; \u200CႺ。ς; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.ς ++T; \u200CႺ。ς; [C1 P1 V6]; [P1 V6] # Ⴚ.ς ++N; \u200CႺ。ς; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.ς ++T; \u200Cⴚ。ς; [C1]; xn--ilj.xn--4xa # ⴚ.ς ++N; \u200Cⴚ。ς; [C1]; [C1] # ⴚ.ς ++T; \u200CႺ。Σ; [C1 P1 V6]; [P1 V6] # Ⴚ.σ ++N; \u200CႺ。Σ; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.σ ++T; \u200Cⴚ。σ; [C1]; xn--ilj.xn--4xa # ⴚ.σ ++N; \u200Cⴚ。σ; [C1]; [C1] # ⴚ.σ ++B; xn--ilj.xn--4xa; ⴚ.σ; xn--ilj.xn--4xa ++B; ⴚ.σ; ; xn--ilj.xn--4xa ++B; Ⴚ.Σ; [P1 V6]; [P1 V6] ++T; ⴚ.ς; ; xn--ilj.xn--4xa ++N; ⴚ.ς; ; xn--ilj.xn--3xa ++T; Ⴚ.ς; [P1 V6]; [P1 V6] ++N; Ⴚ.ς; [P1 V6]; [P1 V6] ++B; xn--ynd.xn--4xa; [V6]; [V6] ++B; xn--ynd.xn--3xa; [V6]; [V6] ++B; xn--ilj.xn--3xa; ⴚ.ς; xn--ilj.xn--3xa ++B; Ⴚ.σ; [P1 V6]; [P1 V6] ++B; xn--0ug262c.xn--4xa; [C1]; [C1] # ⴚ.σ ++B; xn--ynd759e.xn--4xa; [C1 V6]; [C1 V6] # Ⴚ.σ ++B; xn--0ug262c.xn--3xa; [C1]; [C1] # ⴚ.ς ++B; xn--ynd759e.xn--3xa; [C1 V6]; [C1 V6] # Ⴚ.ς ++T; \u200Cⴚ。ς; [C1]; xn--ilj.xn--4xa # ⴚ.ς ++N; \u200Cⴚ。ς; [C1]; [C1] # ⴚ.ς ++T; \u200CႺ。Σ; [C1 P1 V6]; [P1 V6] # Ⴚ.σ ++N; \u200CႺ。Σ; [C1 P1 V6]; [C1 P1 V6] # Ⴚ.σ ++T; \u200Cⴚ。σ; [C1]; xn--ilj.xn--4xa # ⴚ.σ ++N; \u200Cⴚ。σ; 
[C1]; [C1] # ⴚ.σ ++B; 𞤃.𐹦; [B1]; [B1] ++B; 𞤃.𐹦; [B1]; [B1] ++B; 𞤥.𐹦; [B1]; [B1] ++B; xn--de6h.xn--eo0d; [B1]; [B1] ++B; 𞤥.𐹦; [B1]; [B1] ++T; \u200D⾕。\u200C\u0310\uA953ꡎ; [C1 C2]; [V5] # 谷.꥓̐ꡎ ++N; \u200D⾕。\u200C\u0310\uA953ꡎ; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ ++T; \u200D⾕。\u200C\uA953\u0310ꡎ; [C1 C2]; [V5] # 谷.꥓̐ꡎ ++N; \u200D⾕。\u200C\uA953\u0310ꡎ; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ ++T; \u200D谷。\u200C\uA953\u0310ꡎ; [C1 C2]; [V5] # 谷.꥓̐ꡎ ++N; \u200D谷。\u200C\uA953\u0310ꡎ; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ ++B; xn--6g3a.xn--0sa8175flwa; [V5]; [V5] # 谷.꥓̐ꡎ ++B; xn--1ug0273b.xn--0sa359l6n7g13a; [C1 C2]; [C1 C2] # 谷.꥓̐ꡎ ++T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 ++N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 ++T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 ++N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 ++T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 ++N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 ++T; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 ++N; \u06AA-뉔.𞤐\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 ++T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 ++N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 ++T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 ++N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 ++B; xn----guc3592k.xn--qe6h; [B2 B3]; [B2 B3] # ڪ-뉔.𞤲 ++B; xn----guc3592k.xn--0ug7611p; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 ++T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 ++N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 ++T; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3] # ڪ-뉔.𞤲 ++N; \u06AA-뉔.𞤲\u200C; [B2 B3 C1]; [B2 B3 C1] # ڪ-뉔.𞤲 ++T; 񔲵5ᦛς.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ ++N; 񔲵5ᦛς.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ ++T; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ ++N; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ ++T; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ ++N; 񔲵5ᦛς.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] 
# 5ᦛς.꣄ݻܸ᳒ ++B; 񔲵5ᦛΣ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ ++B; 񔲵5ᦛσ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ ++B; xn--5-0mb988ng603j.xn--fob7kk44dl41k; [B1 V5 V6]; [B1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ ++B; xn--5-ymb298ng603j.xn--fob7kk44dl41k; [B1 V5 V6]; [B1 V5 V6] # 5ᦛς.꣄ݻܸ᳒ ++B; 񔲵5ᦛΣ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ ++B; 񔲵5ᦛσ.\uA8C4\u077B\u0738\u1CD2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ ++B; 񔲵5ᦛΣ.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ ++B; 񔲵5ᦛσ.\uA8C4\u077B\u1CD2\u0738; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 5ᦛσ.꣄ݻܸ᳒ ++B; 淽。ᠾ; 淽.ᠾ; xn--34w.xn--x7e ++B; xn--34w.xn--x7e; 淽.ᠾ; xn--34w.xn--x7e ++B; 淽.ᠾ; ; xn--34w.xn--x7e ++B; 𐹴𑘷。-; [B1 V3]; [B1 V3] ++B; xn--so0do6k.-; [B1 V3]; [B1 V3] ++B; 򬨩Ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] ++B; 򬨩Ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] ++B; 򬨩ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] ++B; xn--8di78qvw32y.xn--k80d; [V5 V6]; [V5 V6] ++B; xn--rnd896i0j14q.xn--k80d; [V5 V6]; [V5 V6] ++B; 򬨩ⴓ❓。𑄨; [P1 V5 V6]; [P1 V5 V6] ++T; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ ++N; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ ++T; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ ++N; \u200C𐹡𞤌Ⴇ。ßႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ßႣ ++T; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ ++N; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ ++T; \u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ ++N; \u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ ++T; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ ++N; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ ++T; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ ++N; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ ++B; xn--ykj9323eegwf.xn--ss-151a; [B1]; [B1] ++B; xn--0ug332c3q0pr56g.xn--ss-151a; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ ++B; xn--fnd1201kegrf.xn--ss-fek; [B1 V6]; [B1 V6] ++B; xn--fnd599eyj4pr50g.xn--ss-fek; [B1 C1 V6]; [B1 C1 V6] # 𐹡𞤮Ⴇ.ssႣ ++B; xn--0ug332c3q0pr56g.xn--zca417t; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ ++B; xn--fnd599eyj4pr50g.xn--zca681f; [B1 C1 V6]; [B1 C1 V6] # 𐹡𞤮Ⴇ.ßႣ 
++T; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ ++N; \u200C𐹡𞤮ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ ++T; \u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ ++N; \u200C𐹡𞤌Ⴇ。SSႣ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssႣ ++T; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ ++N; \u200C𐹡𞤮ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ ++T; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ ++N; \u200C𐹡𞤌ⴇ。Ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ ++T; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ ++N; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ ++T; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ ++N; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ ++T; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ ++N; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ ++B; xn--fnd1201kegrf.xn--ss-151a; [B1 V6]; [B1 V6] ++B; xn--fnd599eyj4pr50g.xn--ss-151a; [B1 C1 V6]; [B1 C1 V6] # 𐹡𞤮Ⴇ.ssⴃ ++T; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ßⴃ ++N; \u200C𐹡𞤌ⴇ。ßⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ßⴃ ++T; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1] # 𐹡𞤮ⴇ.ssⴃ ++N; \u200C𐹡𞤌ⴇ。ssⴃ; [B1 C1]; [B1 C1] # 𐹡𞤮ⴇ.ssⴃ ++T; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ ++N; \u200C𐹡𞤌Ⴇ。Ssⴃ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹡𞤮Ⴇ.ssⴃ ++B; \u17FF。𞬳; [P1 V6]; [P1 V6] # . ++B; \u17FF。𞬳; [P1 V6]; [P1 V6] # . ++B; xn--45e.xn--et6h; [V6]; [V6] # . 
++T; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [V5] # ْ.್𑚳 ++N; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [C2 V5] # ْ.್𑚳 ++T; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [V5] # ْ.್𑚳 ++N; \u0652\u200D。\u0CCD𑚳; [C2 V5]; [C2 V5] # ْ.್𑚳 ++B; xn--uhb.xn--8tc4527k; [V5]; [V5] # ْ.್𑚳 ++B; xn--uhb882k.xn--8tc4527k; [C2 V5]; [C2 V5] # ْ.್𑚳 ++B; -≠ᠻ.\u076D𞥃≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ ++B; -=\u0338ᠻ.\u076D𞥃<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ ++B; -≠ᠻ.\u076D𞥃≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ ++B; -=\u0338ᠻ.\u076D𞥃<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ ++B; -=\u0338ᠻ.\u076D𞤡<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ ++B; -≠ᠻ.\u076D𞤡≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ ++B; xn----g6j886c.xn--xpb049kk353abj99f; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # -≠ᠻ.ݭ𞥃≮ ++B; -=\u0338ᠻ.\u076D𞤡<\u0338󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ ++B; -≠ᠻ.\u076D𞤡≮󟷺; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # -≠ᠻ.ݭ𞥃≮ ++B; 󠰆≯\u07B5𐻪.򊥕≮𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ ++B; 󠰆>\u0338\u07B5𐻪.򊥕<\u0338𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ ++B; 󠰆≯\u07B5𐻪.򊥕≮𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ ++B; 󠰆>\u0338\u07B5𐻪.򊥕<\u0338𑁆\u084C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ≯.≮𑁆ࡌ ++B; xn--zrb797kdm1oes34i.xn--bwb394k8k2o25n6d; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ≯.≮𑁆ࡌ ++B; ≠󦋂.\u0600\u0BCD-\u06B9; [B1 P1 V6]; [B1 P1 V6] # ≠.்-ڹ ++B; =\u0338󦋂.\u0600\u0BCD-\u06B9; [B1 P1 V6]; [B1 P1 V6] # ≠.்-ڹ ++B; xn--1ch22084l.xn----qkc07co6n; [B1 V6]; [B1 V6] # ≠.்-ڹ ++B; \u17DD󠁣≠。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 ++B; \u17DD󠁣=\u0338。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 ++B; \u17DD󠁣≠。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 ++B; \u17DD󠁣=\u0338。𐹼𐋤; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ៝≠.𐹼𐋤 ++B; xn--54e694cn389z.xn--787ct8r; [B1 V5 V6]; [B1 V5 V6] # ៝≠.𐹼𐋤 ++T; ß𰀻񆬗。𝩨🕮ß; [P1 V5 V6]; [P1 V5 V6] ++N; ß𰀻񆬗。𝩨🕮ß; [P1 V5 V6]; [P1 V5 V6] ++T; ß𰀻񆬗。𝩨🕮ß; [P1 V5 V6]; [P1 V5 V6] ++N; ß𰀻񆬗。𝩨🕮ß; [P1 
V5 V6]; [P1 V5 V6] ++B; SS𰀻񆬗。𝩨🕮SS; [P1 V5 V6]; [P1 V5 V6] ++B; ss𰀻񆬗。𝩨🕮ss; [P1 V5 V6]; [P1 V5 V6] ++B; Ss𰀻񆬗。𝩨🕮Ss; [P1 V5 V6]; [P1 V5 V6] ++B; xn--ss-jl59biy67d.xn--ss-4d11aw87d; [V5 V6]; [V5 V6] ++B; xn--zca20040bgrkh.xn--zca3653v86qa; [V5 V6]; [V5 V6] ++B; SS𰀻񆬗。𝩨🕮SS; [P1 V5 V6]; [P1 V5 V6] ++B; ss𰀻񆬗。𝩨🕮ss; [P1 V5 V6]; [P1 V5 V6] ++B; Ss𰀻񆬗。𝩨🕮Ss; [P1 V5 V6]; [P1 V5 V6] ++T; \u200D。\u200C; [C1 C2]; [A4_2] # . ++N; \u200D。\u200C; [C1 C2]; [C1 C2] # . ++B; xn--1ug.xn--0ug; [C1 C2]; [C1 C2] # . ++T; \u0483𐭞\u200D.\u17B9𞯌򟩚; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ҃𐭞.ឹ ++N; \u0483𐭞\u200D.\u17B9𞯌򟩚; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ҃𐭞.ឹ ++B; xn--m3a6965k.xn--43e8670vmd79b; [B1 V5 V6]; [B1 V5 V6] # ҃𐭞.ឹ ++B; xn--m3a412lrr0o.xn--43e8670vmd79b; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ҃𐭞.ឹ ++T; \u200C𐠨\u200C临。ꡢ򄷞ⶏ𐹣; [B1 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # 𐠨临.ꡢⶏ𐹣 ++N; \u200C𐠨\u200C临。ꡢ򄷞ⶏ𐹣; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐠨临.ꡢⶏ𐹣 ++B; xn--miq9646b.xn--uojv340bk71c99u9f; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] ++B; xn--0uga2656aop9k.xn--uojv340bk71c99u9f; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐠨临.ꡢⶏ𐹣 ++B; 󠑘.󠄮; [P1 V6]; [P1 V6] ++B; 󠑘.󠄮; [P1 V6]; [P1 V6] ++B; xn--s136e.; [V6]; [V6] ++B; 𐫄\u0D4D.\uAAF6; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐫄്.꫶ ++B; 𐫄\u0D4D.\uAAF6; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐫄്.꫶ ++B; xn--wxc7880k.xn--2v9a; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐫄്.꫶ ++B; \uA9B7󝵙멹。⒛󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.⒛ ++B; \uA9B7󝵙멹。⒛󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.⒛ ++B; \uA9B7󝵙멹。20.󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.20. ++B; \uA9B7󝵙멹。20.󠨇; [P1 V5 V6]; [P1 V5 V6] # ꦷ멹.20. ++B; xn--ym9av13acp85w.20.xn--d846e; [V5 V6]; [V5 V6] # ꦷ멹.20. ++B; xn--ym9av13acp85w.xn--dth22121k; [V5 V6]; [V5 V6] # ꦷ멹.⒛ ++B; Ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳⒊ ++B; Ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳⒊ ++B; Ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳3. ++B; Ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # Ⴅ릖.ݷ𐹳3. ++B; ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳3. 
++B; ⴅ󲬹릖󠶚.\u0777𐹳3.; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳3. ++B; xn--wkj8016bne45io02g.xn--3-55c6803r.; [B4 B6 V6]; [B4 B6 V6] # ⴅ릖.ݷ𐹳3. ++B; xn--dnd2167fnet0io02g.xn--3-55c6803r.; [B4 B6 V6]; [B4 B6 V6] # Ⴅ릖.ݷ𐹳3. ++B; ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳⒊ ++B; ⴅ󲬹릖󠶚.\u0777𐹳⒊; [B4 B6 P1 V6]; [B4 B6 P1 V6] # ⴅ릖.ݷ𐹳⒊ ++B; xn--wkj8016bne45io02g.xn--7pb000mwm4n; [B4 B6 V6]; [B4 B6 V6] # ⴅ릖.ݷ𐹳⒊ ++B; xn--dnd2167fnet0io02g.xn--7pb000mwm4n; [B4 B6 V6]; [B4 B6 V6] # Ⴅ릖.ݷ𐹳⒊ ++T; \u200C。︒; [C1 P1 V6]; [P1 V6 A4_2] # .︒ ++N; \u200C。︒; [C1 P1 V6]; [C1 P1 V6] # .︒ ++T; \u200C。。; [C1 A4_2]; [A4_2] # .. ++N; \u200C。。; [C1 A4_2]; [C1 A4_2] # .. ++B; ..; [A4_2]; [A4_2] ++B; xn--0ug..; [C1 A4_2]; [C1 A4_2] # .. ++B; .xn--y86c; [V6 A4_2]; [V6 A4_2] ++B; xn--0ug.xn--y86c; [C1 V6]; [C1 V6] # .︒ ++B; ≯\u076D.₄; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 ++B; >\u0338\u076D.₄; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 ++B; ≯\u076D.4; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 ++B; >\u0338\u076D.4; [B1 P1 V6]; [B1 P1 V6] # ≯ݭ.4 ++B; xn--xpb149k.4; [B1 V6]; [B1 V6] # ≯ݭ.4 ++T; ᡲ-𝟹.ß-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ß-- ++N; ᡲ-𝟹.ß-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ß-- ++T; ᡲ-3.ß-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ß-- ++N; ᡲ-3.ß-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ß-- ++T; ᡲ-3.SS-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- ++N; ᡲ-3.SS-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- ++T; ᡲ-3.ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- ++N; ᡲ-3.ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- ++T; ᡲ-3.Ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- ++N; ᡲ-3.Ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- ++B; xn---3-p9o.ss--; [V2 V3]; [V2 V3] ++B; xn---3-p9o.xn--ss---276a; [C1 V3]; [C1 V3] # ᡲ-3.ss-- ++B; xn---3-p9o.xn-----fia9303a; [C1 V3]; [C1 V3] # ᡲ-3.ß-- ++T; ᡲ-𝟹.SS-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- ++N; ᡲ-𝟹.SS-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- ++T; ᡲ-𝟹.ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- ++N; ᡲ-𝟹.ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- ++T; ᡲ-𝟹.Ss-\u200C-; [C1 V3]; [V2 V3] # ᡲ-3.ss-- ++N; ᡲ-𝟹.Ss-\u200C-; [C1 V3]; [C1 V3] # ᡲ-3.ss-- ++B; \uFD08𝟦\u0647󎊯。Ӏ; [B2 
B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.Ӏ ++B; \u0636\u064A4\u0647󎊯。Ӏ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.Ӏ ++B; \u0636\u064A4\u0647󎊯。ӏ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.ӏ ++B; xn--4-tnc6ck183523b.xn--s5a; [B2 B3 V6]; [B2 B3 V6] # ضي4ه.ӏ ++B; xn--4-tnc6ck183523b.xn--d5a; [B2 B3 V6]; [B2 B3 V6] # ضي4ه.Ӏ ++B; \uFD08𝟦\u0647󎊯。ӏ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ضي4ه.ӏ ++B; -.\u0602\u0622𑆾🐹; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.آ𑆾🐹 ++B; -.\u0602\u0627\u0653𑆾🐹; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.آ𑆾🐹 ++B; -.xn--kfb8dy983hgl7g; [B1 V3 V6]; [B1 V3 V6] # -.آ𑆾🐹 ++B; 󙶜ᢘ。\u1A7F⺢; [P1 V5 V6]; [P1 V5 V6] # ᢘ.᩿⺢ ++B; xn--ibf35138o.xn--fpfz94g; [V5 V6]; [V5 V6] # ᢘ.᩿⺢ ++B; ≠ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 ++B; =\u0338ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 ++B; ≠ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 ++B; =\u0338ႷᠤႫ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 ++B; =\u0338ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 ++B; ≠ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 ++B; ≠Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 ++B; =\u0338Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 ++B; xn--vnd619as6ig6k.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 ++B; XN--VND619AS6IG6K.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 ++B; Xn--Vnd619as6ig6k.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 ++B; xn--66e353ce0ilb.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 ++B; XN--66E353CE0ILB.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 ++B; Xn--66E353ce0ilb.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 ++B; xn--jndx718cnnl.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 ++B; XN--JNDX718CNNL.\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 ++B; Xn--Jndx718cnnl.\uD907\u034C\u0633觴; [B1 
B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ႷᠤႫ.͌س觴 ++B; =\u0338ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 ++B; ≠ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠ⴗᠤⴋ.͌س觴 ++B; ≠Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 ++B; =\u0338Ⴗᠤⴋ。\uD907\u034C\u0633觴; [B1 B5 P1 V6]; [B1 B5 P1 V6 A3] # ≠Ⴗᠤⴋ.͌س觴 ++B; \u0667.𐥨; [B1 P1 V6]; [B1 P1 V6] # ٧. ++B; xn--gib.xn--vm9c; [B1 V6]; [B1 V6] # ٧. ++T; \uA9C0𝟯。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B5 P1 V5 V6] # ꧀3.𐹪᯳ ++N; \uA9C0𝟯。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ꧀3.𐹪᯳ ++T; \uA9C03。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B5 P1 V5 V6] # ꧀3.𐹪᯳ ++N; \uA9C03。\u200D񼑥𐹪\u1BF3; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ꧀3.𐹪᯳ ++B; xn--3-5z4e.xn--1zfz754hncv8b; [B5 V5 V6]; [B5 V5 V6] # ꧀3.𐹪᯳ ++B; xn--3-5z4e.xn--1zf96ony8ygd68c; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ꧀3.𐹪᯳ ++B; 򣕄4񠖽.≯\u0664𑀾󠸌; [B1 P1 V6]; [B1 P1 V6] # 4.≯٤𑀾 ++B; 򣕄4񠖽.>\u0338\u0664𑀾󠸌; [B1 P1 V6]; [B1 P1 V6] # 4.≯٤𑀾 ++B; xn--4-fg85dl688i.xn--dib174li86ntdy0i; [B1 V6]; [B1 V6] # 4.≯٤𑀾 ++B; 򗆧𝟯。⒈\u1A76𝟚򠘌; [P1 V6]; [P1 V6] # 3.⒈᩶2 ++B; 򗆧3。1.\u1A762򠘌; [P1 V5 V6]; [P1 V5 V6] # 3.1.᩶2 ++B; xn--3-rj42h.1.xn--2-13k96240l; [V5 V6]; [V5 V6] # 3.1.᩶2 ++B; xn--3-rj42h.xn--2-13k746cq465x; [V6]; [V6] # 3.⒈᩶2 ++T; \u200D₅⒈。≯𝟴\u200D; [C2 P1 V6]; [P1 V6] # 5⒈.≯8 ++N; \u200D₅⒈。≯𝟴\u200D; [C2 P1 V6]; [C2 P1 V6] # 5⒈.≯8 ++T; \u200D₅⒈。>\u0338𝟴\u200D; [C2 P1 V6]; [P1 V6] # 5⒈.≯8 ++N; \u200D₅⒈。>\u0338𝟴\u200D; [C2 P1 V6]; [C2 P1 V6] # 5⒈.≯8 ++T; \u200D51.。≯8\u200D; [C2 P1 V6 A4_2]; [P1 V6 A4_2] # 51..≯8 ++N; \u200D51.。≯8\u200D; [C2 P1 V6 A4_2]; [C2 P1 V6 A4_2] # 51..≯8 ++T; \u200D51.。>\u03388\u200D; [C2 P1 V6 A4_2]; [P1 V6 A4_2] # 51..≯8 ++N; \u200D51.。>\u03388\u200D; [C2 P1 V6 A4_2]; [C2 P1 V6 A4_2] # 51..≯8 ++B; 51..xn--8-ogo; [V6 A4_2]; [V6 A4_2] ++B; xn--51-l1t..xn--8-ugn00i; [C2 V6 A4_2]; [C2 V6 A4_2] # 51..≯8 ++B; xn--5-ecp.xn--8-ogo; [V6]; [V6] ++B; xn--5-tgnz5r.xn--8-ugn00i; [C2 V6]; [C2 V6] # 5⒈.≯8 ++T; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 
V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ ++N; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ ++T; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ ++N; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ ++T; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ ++N; ꡰ\u0697\u1086.򪘙\u072F≠\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ ++T; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ꡰڗႆ.ܯ≠ ++N; ꡰ\u0697\u1086.򪘙\u072F=\u0338\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ꡰڗႆ.ܯ≠ ++B; xn--tjb002cn51k.xn--5nb630lbj91q; [B5 B6 V6]; [B5 B6 V6] # ꡰڗႆ.ܯ≠ ++B; xn--tjb002cn51k.xn--5nb448jcubcz547b; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ꡰڗႆ.ܯ≠ ++B; 𑄱。򪌿𐹵; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] ++B; 𑄱。򪌿𐹵; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] ++B; xn--t80d.xn--to0d14792b; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] ++B; 𝟥\u0600。\u073D; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 3.ܽ ++B; 3\u0600。\u073D; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 3.ܽ ++B; xn--3-rkc.xn--kob; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 3.ܽ ++B; \u0637𐹣\u0666.\u076D긷; [B2 B3]; [B2 B3] # ط𐹣٦.ݭ긷 ++B; \u0637𐹣\u0666.\u076D긷; [B2 B3]; [B2 B3] # ط𐹣٦.ݭ긷 ++B; xn--2gb8gu829f.xn--xpb0156f; [B2 B3]; [B2 B3] # ط𐹣٦.ݭ긷 ++B; ︒Ↄ\u2DE7򾀃.Ⴗ𐣞; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ︒Ↄⷧ.Ⴗ ++B; 。Ↄ\u2DE7򾀃.Ⴗ𐣞; [B5 B6 P1 V6 A4_2]; [B5 B6 P1 V6 A4_2] # .Ↄⷧ.Ⴗ ++B; 。ↄ\u2DE7򾀃.ⴗ𐣞; [B5 B6 P1 V6 A4_2]; [B5 B6 P1 V6 A4_2] # .ↄⷧ.ⴗ ++B; .xn--r5gy00cll06u.xn--flj4541e; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] # .ↄⷧ.ⴗ ++B; .xn--q5g000cll06u.xn--vnd8618j; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] # .Ↄⷧ.Ⴗ ++B; ︒ↄ\u2DE7򾀃.ⴗ𐣞; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ︒ↄⷧ.ⴗ ++B; xn--r5gy00c056n0226g.xn--flj4541e; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ︒ↄⷧ.ⴗ ++B; xn--q5g000c056n0226g.xn--vnd8618j; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ︒Ↄⷧ.Ⴗ ++B; \u0600.\u05B1; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # .ֱ ++B; xn--ifb.xn--8cb; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # .ֱ 
++T; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++N; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++T; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++N; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++T; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++N; ς≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++T; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++N; ς>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++B; Σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++B; Σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++B; σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++B; σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++B; xn--4xa818m.xn--1o0d; [B1 B6 V6]; [B1 B6 V6] ++B; xn--3xa028m.xn--1o0d; [B1 B6 V6]; [B1 B6 V6] ++B; Σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++B; Σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++B; σ≯。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++B; σ>\u0338。𐹽; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++T; \u17D2\u200D\u075F。𐹶; [B1 V5]; [B1 V5] # ្ݟ.𐹶 ++N; \u17D2\u200D\u075F。𐹶; [B1 V5]; [B1 V5] # ្ݟ.𐹶 ++B; xn--jpb535f.xn--uo0d; [B1 V5]; [B1 V5] # ្ݟ.𐹶 ++B; xn--jpb535fv9f.xn--uo0d; [B1 V5]; [B1 V5] # ្ݟ.𐹶 ++B; 𾷂\u0A42Ⴊ񂂟.≮; [P1 V6]; [P1 V6] # ੂႪ.≮ ++B; 𾷂\u0A42Ⴊ񂂟.<\u0338; [P1 V6]; [P1 V6] # ੂႪ.≮ ++B; 𾷂\u0A42ⴊ񂂟.<\u0338; [P1 V6]; [P1 V6] # ੂⴊ.≮ ++B; 𾷂\u0A42ⴊ񂂟.≮; [P1 V6]; [P1 V6] # ੂⴊ.≮ ++B; xn--nbc229o4y27dgskb.xn--gdh; [V6]; [V6] # ੂⴊ.≮ ++B; xn--nbc493aro75ggskb.xn--gdh; [V6]; [V6] # ੂႪ.≮ ++B; ꡠ.۲; ꡠ.۲; xn--5c9a.xn--fmb ++B; ꡠ.۲; ; xn--5c9a.xn--fmb ++B; xn--5c9a.xn--fmb; ꡠ.۲; xn--5c9a.xn--fmb ++B; 𐹣񄷄。ꡬ🄄; [B1 P1 V6]; [B1 P1 V6] ++B; 𐹣񄷄。ꡬ3,; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++B; xn--bo0d0203l.xn--3,-yj9h; [B1 B6 P1 V6]; [B1 B6 P1 V6] ++B; xn--bo0d0203l.xn--id9a4443d; [B1 V6]; [B1 V6] ++T; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 B3 B6 P1 V3 V5 V6] # -్𑲓.് ++N; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # -్𑲓.് ++T; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 B3 B6 P1 V3 V5 V6] # -్𑲓.് ++N; -\u0C4D𞾀𑲓。\u200D\u0D4D; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # -్𑲓.് ++B; xn----x6e0220sclug.xn--wxc; [B1 B3 B6 V3 V5 V6]; [B1 B3 B6 V3 V5 V6] # -్𑲓.് ++B; xn----x6e0220sclug.xn--wxc317g; [B1 
C2 V3 V6]; [B1 C2 V3 V6] # -్𑲓.് ++T; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [P1 V5 V6] # ꙽霣🄆.𑁂ᬁ ++N; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣🄆.𑁂ᬁ ++T; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [P1 V5 V6] # ꙽霣🄆.𑁂ᬁ ++N; \uA67D\u200C霣🄆。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣🄆.𑁂ᬁ ++T; \uA67D\u200C霣5,。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [P1 V5 V6] # ꙽霣5,.𑁂ᬁ ++N; \uA67D\u200C霣5,。\u200C𑁂\u1B01; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣5,.𑁂ᬁ ++B; xn--5,-op8g373c.xn--4sf0725i; [P1 V5 V6]; [P1 V5 V6] # ꙽霣5,.𑁂ᬁ ++B; xn--5,-i1tz135dnbqa.xn--4sf36u6u4w; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ꙽霣5,.𑁂ᬁ ++B; xn--2q5a751a653w.xn--4sf0725i; [V5 V6]; [V5 V6] # ꙽霣🄆.𑁂ᬁ ++B; xn--0ug4208b2vjuk63a.xn--4sf36u6u4w; [C1 V5 V6]; [C1 V5 V6] # ꙽霣🄆.𑁂ᬁ ++B; 兎。ᠼ󠴜𑚶𑰿; [P1 V6]; [P1 V6] ++B; 兎。ᠼ󠴜𑚶𑰿; [P1 V6]; [P1 V6] ++B; xn--b5q.xn--v7e6041kqqd4m251b; [V6]; [V6] ++T; 𝟙。\u200D𝟸\u200D⁷; [C2]; 1.27 # 1.27 ++N; 𝟙。\u200D𝟸\u200D⁷; [C2]; [C2] # 1.27 ++T; 1。\u200D2\u200D7; [C2]; 1.27 # 1.27 ++N; 1。\u200D2\u200D7; [C2]; [C2] # 1.27 ++B; 1.27; ; ++B; 1.xn--27-l1tb; [C2]; [C2] # 1.27 ++B; ᡨ-。󠻋𝟷; [P1 V3 V6]; [P1 V3 V6] ++B; ᡨ-。󠻋1; [P1 V3 V6]; [P1 V3 V6] ++B; xn----z8j.xn--1-5671m; [V3 V6]; [V3 V6] ++B; 𑰻񵀐𐫚.\u0668⁹; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑰻𐫚.٨9 ++B; 𑰻񵀐𐫚.\u06689; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑰻𐫚.٨9 ++B; xn--gx9cr01aul57i.xn--9-oqc; [B1 V5 V6]; [B1 V5 V6] # 𑰻𐫚.٨9 ++T; Ⴜ򈷭\u0F80⾇。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # Ⴜྀ舛.Ⴏ♀ ++N; Ⴜ򈷭\u0F80⾇。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴜྀ舛.Ⴏ♀ ++T; Ⴜ򈷭\u0F80舛。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # Ⴜྀ舛.Ⴏ♀ ++N; Ⴜ򈷭\u0F80舛。Ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴜྀ舛.Ⴏ♀ ++T; ⴜ򈷭\u0F80舛。ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # ⴜྀ舛.ⴏ♀ ++N; ⴜ򈷭\u0F80舛。ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴜྀ舛.ⴏ♀ ++B; xn--zed372mdj2do3v4h.xn--e5h11w; [V6]; [V6] # ⴜྀ舛.ⴏ♀ ++B; xn--zed372mdj2do3v4h.xn--0uga678bgyh; [C1 V6]; [C1 V6] # ⴜྀ舛.ⴏ♀ ++B; xn--zed54dz10wo343g.xn--nnd651i; [V6]; [V6] # Ⴜྀ舛.Ⴏ♀ ++B; xn--zed54dz10wo343g.xn--nnd089ea464d; [C1 V6]; [C1 V6] # Ⴜྀ舛.Ⴏ♀ ++T; 
ⴜ򈷭\u0F80⾇。ⴏ♀\u200C\u200C; [C1 P1 V6]; [P1 V6] # ⴜྀ舛.ⴏ♀ ++N; ⴜ򈷭\u0F80⾇。ⴏ♀\u200C\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴜྀ舛.ⴏ♀ ++T; 𑁆𝟰.\u200D; [C2 V5]; [V5] # 𑁆4. ++N; 𑁆𝟰.\u200D; [C2 V5]; [C2 V5] # 𑁆4. ++T; 𑁆4.\u200D; [C2 V5]; [V5] # 𑁆4. ++N; 𑁆4.\u200D; [C2 V5]; [C2 V5] # 𑁆4. ++B; xn--4-xu7i.; [V5]; [V5] ++B; xn--4-xu7i.xn--1ug; [C2 V5]; [C2 V5] # 𑁆4. ++T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 ++N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 ++T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 ++N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 ++T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 ++N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 ++T; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # Ⴞ癀.𑘿붼 ++N; 񮴘Ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # Ⴞ癀.𑘿붼 ++T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 ++N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 ++T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 ++N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 ++B; xn--mlju35u7qx2f.xn--et3bn23n; [V5 V6]; [V5 V6] ++B; xn--mlju35u7qx2f.xn--0ugb6122js83c; [C1 V5 V6]; [C1 V5 V6] # ⴞ癀.𑘿붼 ++B; xn--2nd6803c7q37d.xn--et3bn23n; [V5 V6]; [V5 V6] ++B; xn--2nd6803c7q37d.xn--0ugb6122js83c; [C1 V5 V6]; [C1 V5 V6] # Ⴞ癀.𑘿붼 ++T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 ++N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 ++T; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [P1 V5 V6] # ⴞ癀.𑘿붼 ++N; 񮴘ⴞ癀。𑘿\u200D\u200C붼; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⴞ癀.𑘿붼 ++B; 󚀅-\u0BCD。\u06B9; [B6 P1 V6]; [B6 P1 V6] # -்.ڹ ++B; xn----mze84808x.xn--skb; [B6 V6]; [B6 V6] # -்.ڹ ++B; ᡃ𝟧≯ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] ++B; ᡃ𝟧>\u0338ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] ++B; ᡃ5≯ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] ++B; ᡃ5>\u0338ᠣ.氁񨏱ꁫ; [P1 V6]; [P1 V6] ++B; xn--5-24jyf768b.xn--lqw213ime95g; [V6]; [V6] ++B; 𐹬𝩇.\u0F76; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ ++B; 𐹬𝩇.\u0FB2\u0F80; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ ++B; 𐹬𝩇.\u0FB2\u0F80; [B1 
B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ ++B; xn--ko0d8295a.xn--zed3h; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𐹬𝩇.ྲྀ ++B; -𑈶⒏.⒎𰛢󠎭; [P1 V3 V6]; [P1 V3 V6] ++B; -𑈶8..7.𰛢󠎭; [P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] ++B; xn---8-bv5o..7.xn--c35nf1622b; [V3 V6 A4_2]; [V3 V6 A4_2] ++B; xn----scp6252h.xn--zshy411yzpx2d; [V3 V6]; [V3 V6] ++T; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ ++N; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ ++T; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ ++N; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ ++T; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ ++N; \u200CႡ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ ++T; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # Ⴁ畝.≮ ++N; \u200CႡ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # Ⴁ畝.≮ ++T; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ ++N; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ ++T; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ ++N; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ ++B; xn--skjy82u.xn--gdh; [V6]; [V6] ++B; xn--0ugc160hb36e.xn--gdh; [C1 C2 V6]; [C1 C2 V6] # ⴁ畝.≮ ++B; xn--8md0962c.xn--gdh; [V6]; [V6] ++B; xn--8md700fea3748f.xn--gdh; [C1 C2 V6]; [C1 C2 V6] # Ⴁ畝.≮ ++T; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ ++N; \u200Cⴁ畝\u200D.<\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ ++T; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [P1 V6] # ⴁ畝.≮ ++N; \u200Cⴁ畝\u200D.≮; [C1 C2 P1 V6]; [C1 C2 P1 V6] # ⴁ畝.≮ ++T; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ ++N; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ ++T; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ ++N; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ ++T; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ ++N; 歷。𐹻≯󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ ++T; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # 歷.𐹻≯ ++N; 歷。𐹻>\u0338󳛽\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 歷.𐹻≯ ++B; xn--nmw.xn--hdh7804gdms2h; [B1 V6]; [B1 V6] ++B; xn--nmw.xn--1ugx6gs128a1134j; [B1 C2 V6]; [B1 C2 V6] 
# 歷.𐹻≯ ++T; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [P1 V5 V6] # ໋.鎁 ++N; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ໋.鎁 ++T; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [P1 V5 V6] # ໋.鎁 ++N; \u0ECB\u200D.鎁󠰑; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ໋.鎁 ++B; xn--t8c.xn--iz4a43209d; [V5 V6]; [V5 V6] # ໋.鎁 ++B; xn--t8c059f.xn--iz4a43209d; [C2 V5 V6]; [C2 V5 V6] # ໋.鎁 ++T; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. ++N; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. ++T; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. ++N; \u200D\u200C𞤀。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. ++T; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. ++N; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. ++B; xn--9d6h.xn--wh0dj799f; [B5 B6 V6]; [B5 B6 V6] ++B; xn--0ugb45126a.xn--wh0dj799f; [B1 B5 B6 C1 C2 V6]; [B1 B5 B6 C1 C2 V6] # 𞤢. ++T; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B5 B6 P1 V6] # 𞤢. ++N; \u200D\u200C𞤢。𱘅𐶃; [B1 B5 B6 C1 C2 P1 V6]; [B1 B5 B6 C1 C2 P1 V6] # 𞤢. 
++T; \u0628≠𝟫-.ς⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ ++N; \u0628≠𝟫-.ς⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ ++T; \u0628=\u0338𝟫-.ς⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ ++N; \u0628=\u0338𝟫-.ς⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.ς⒍𐹦≠ ++T; \u0628≠9-.ς6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ ++N; \u0628≠9-.ς6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ ++T; \u0628=\u03389-.ς6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ ++N; \u0628=\u03389-.ς6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.ς6.𐹦≠ ++B; \u0628=\u03389-.Σ6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ ++B; \u0628≠9-.Σ6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ ++B; \u0628≠9-.σ6.𐹦≠; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ ++B; \u0628=\u03389-.σ6.𐹦=\u0338; [B1 B3 P1 V3 V6]; [B1 B3 P1 V3 V6] # ب≠9-.σ6.𐹦≠ ++B; xn--9--etd0100a.xn--6-zmb.xn--1ch8704g; [B1 B3 V3 V6]; [B1 B3 V3 V6] # ب≠9-.σ6.𐹦≠ ++B; xn--9--etd0100a.xn--6-xmb.xn--1ch8704g; [B1 B3 V3 V6]; [B1 B3 V3 V6] # ب≠9-.ς6.𐹦≠ ++B; \u0628=\u0338𝟫-.Σ⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ ++B; \u0628≠𝟫-.Σ⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ ++B; \u0628≠𝟫-.σ⒍𐹦≠; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ ++B; \u0628=\u0338𝟫-.σ⒍𐹦=\u0338; [B3 B5 B6 P1 V3 V6]; [B3 B5 B6 P1 V3 V6] # ب≠9-.σ⒍𐹦≠ ++B; xn--9--etd0100a.xn--4xa887mzpbzz04b; [B3 B5 B6 V3 V6]; [B3 B5 B6 V3 V6] # ب≠9-.σ⒍𐹦≠ ++B; xn--9--etd0100a.xn--3xa097mzpbzz04b; [B3 B5 B6 V3 V6]; [B3 B5 B6 V3 V6] # ب≠9-.ς⒍𐹦≠ ++B; 򉛴.-ᡢ\u0592𝨠; [P1 V3 V6]; [P1 V3 V6] # .-ᡢ֒𝨠 ++B; xn--ep37b.xn----hec165lho83b; [V3 V6]; [V3 V6] # .-ᡢ֒𝨠 ++T; \u06CB⒈ß󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ß.- ++N; \u06CB⒈ß󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ß.- ++T; \u06CB1.ß󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ß.- ++N; \u06CB1.ß󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ß.- ++B; \u06CB1.SS󠄽。񷋍-; [B6 P1 V3 V6]; 
[B6 P1 V3 V6] # ۋ1.ss.- ++B; \u06CB1.ss󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ss.- ++B; \u06CB1.Ss󠄽。񷋍-; [B6 P1 V3 V6]; [B6 P1 V3 V6] # ۋ1.ss.- ++B; xn--1-cwc.ss.xn----q001f; [B6 V3 V6]; [B6 V3 V6] # ۋ1.ss.- ++B; xn--1-cwc.xn--zca.xn----q001f; [B6 V3 V6]; [B6 V3 V6] # ۋ1.ß.- ++B; \u06CB⒈SS󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ss.- ++B; \u06CB⒈ss󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ss.- ++B; \u06CB⒈Ss󠄽。񷋍-; [B2 B3 B6 P1 V3 V6]; [B2 B3 B6 P1 V3 V6] # ۋ⒈ss.- ++B; xn--ss-d7d6651a.xn----q001f; [B2 B3 B6 V3 V6]; [B2 B3 B6 V3 V6] # ۋ⒈ss.- ++B; xn--zca541ato3a.xn----q001f; [B2 B3 B6 V3 V6]; [B2 B3 B6 V3 V6] # ۋ⒈ß.- ++T; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςႦ ++N; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςႦ ++T; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςႦ ++N; 𿀫.\u1BAAςႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςႦ ++T; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςⴆ ++N; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςⴆ ++T; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σႦ ++N; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σႦ ++T; 𿀫.\u1BAAσⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ ++N; 𿀫.\u1BAAσⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ ++T; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ ++N; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ ++B; xn--nu4s.xn--4xa153j7im; [V5 V6]; [V5 V6] # .᮪σⴆ ++B; xn--nu4s.xn--4xa153jk8cs1q; [C2 V5 V6]; [C2 V5 V6] # .᮪σⴆ ++B; xn--nu4s.xn--4xa217dxri; [V5 V6]; [V5 V6] # .᮪σႦ ++B; xn--nu4s.xn--4xa217dxriome; [C2 V5 V6]; [C2 V5 V6] # .᮪σႦ ++B; xn--nu4s.xn--3xa353jk8cs1q; [C2 V5 V6]; [C2 V5 V6] # .᮪ςⴆ ++B; xn--nu4s.xn--3xa417dxriome; [C2 V5 V6]; [C2 V5 V6] # .᮪ςႦ ++T; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪ςⴆ ++N; 𿀫.\u1BAAςⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪ςⴆ ++T; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σႦ ++N; 𿀫.\u1BAAΣႦ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σႦ ++T; 𿀫.\u1BAAσⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ ++N; 𿀫.\u1BAAσⴆ\u200D; 
[C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ ++T; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [P1 V5 V6] # .᮪σⴆ ++N; 𿀫.\u1BAAΣⴆ\u200D; [C2 P1 V5 V6]; [C2 P1 V5 V6] # .᮪σⴆ ++B; ⾆\u08E2.𝈴; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 舌.𝈴 ++B; 舌\u08E2.𝈴; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 舌.𝈴 ++B; xn--l0b9413d.xn--kl1h; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 舌.𝈴 ++B; ⫞𐹶𖫴。⭠⒈; [B1 P1 V6]; [B1 P1 V6] ++B; ⫞𐹶𖫴。⭠1.; [B1]; [B1] ++B; xn--53ix188et88b.xn--1-h6r.; [B1]; [B1] ++B; xn--53ix188et88b.xn--tsh52w; [B1 V6]; [B1 V6] ++T; ⒈\u200C\uAAEC︒.\u0ACD; [C1 P1 V5 V6]; [P1 V5 V6] # ⒈ꫬ︒.્ ++N; ⒈\u200C\uAAEC︒.\u0ACD; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⒈ꫬ︒.્ ++T; 1.\u200C\uAAEC。.\u0ACD; [C1 V5 A4_2]; [V5 A4_2] # 1.ꫬ..્ ++N; 1.\u200C\uAAEC。.\u0ACD; [C1 V5 A4_2]; [C1 V5 A4_2] # 1.ꫬ..્ ++B; 1.xn--sv9a..xn--mfc; [V5 A4_2]; [V5 A4_2] # 1.ꫬ..્ ++B; 1.xn--0ug7185c..xn--mfc; [C1 V5 A4_2]; [C1 V5 A4_2] # 1.ꫬ..્ ++B; xn--tsh0720cse8b.xn--mfc; [V5 V6]; [V5 V6] # ⒈ꫬ︒.્ ++B; xn--0ug78o720myr1c.xn--mfc; [C1 V5 V6]; [C1 V5 V6] # ⒈ꫬ︒.્ ++B; \u0C46。䰀\u0668𞭅󠅼; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ె.䰀٨ ++B; xn--eqc.xn--hib5476aim6t; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ె.䰀٨ ++T; ß\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ß.᯲ ++N; ß\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ß.᯲ ++T; SS\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ss.᯲ ++N; SS\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ss.᯲ ++T; ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ss.᯲ ++N; ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ss.᯲ ++T; Ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [P1 V5 V6] # ss.᯲ ++N; Ss\u200D.\u1BF2񄾼; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ss.᯲ ++B; ss.xn--0zf22107b; [V5 V6]; [V5 V6] # ss.᯲ ++B; xn--ss-n1t.xn--0zf22107b; [C2 V5 V6]; [C2 V5 V6] # ss.᯲ ++B; xn--zca870n.xn--0zf22107b; [C2 V5 V6]; [C2 V5 V6] # ß.᯲ ++T; 𑓂\u200C≮.≮; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ ++N; 𑓂\u200C≮.≮; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ ++T; 𑓂\u200C<\u0338.<\u0338; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ ++N; 𑓂\u200C<\u0338.<\u0338; [P1 V5 V6]; [P1 V5 V6] # 𑓂≮.≮ ++B; 
xn--gdhz656g.xn--gdh; [V5 V6]; [V5 V6] ++B; xn--0ugy6glz29a.xn--gdh; [V5 V6]; [V5 V6] # 𑓂≮.≮ ++B; 🕼.\uFFA0; [P1 V6]; [P1 V6] # 🕼. ++B; 🕼.\u1160; [P1 V6]; [P1 V6] # 🕼. ++B; xn--my8h.xn--psd; [V6]; [V6] # 🕼. ++B; xn--my8h.xn--cl7c; [V6]; [V6] # 🕼. ++B; ᡔ\uFD82。񷘎; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᡔلحى. ++B; ᡔ\u0644\u062D\u0649。񷘎; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᡔلحى. ++B; xn--sgb9bq785p.xn--bc31b; [B5 B6 V6]; [B5 B6 V6] # ᡔلحى. ++B; 爕򳙑.𝟰気; [P1 V6]; [P1 V6] ++B; 爕򳙑.4気; [P1 V6]; [P1 V6] ++B; xn--1zxq3199c.xn--4-678b; [V6]; [V6] ++B; ⒋𑍍Ⴝ-.𞬪\u0DCA\u05B5; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ⒋𑍍Ⴝ-.්ֵ ++B; 4.𑍍Ⴝ-.𞬪\u0DCA\u05B5; [B1 B6 P1 V3 V5 V6]; [B1 B6 P1 V3 V5 V6] # 4.𑍍Ⴝ-.්ֵ ++B; 4.𑍍ⴝ-.𞬪\u0DCA\u05B5; [B1 B6 P1 V3 V5 V6]; [B1 B6 P1 V3 V5 V6] # 4.𑍍ⴝ-.්ֵ ++B; 4.xn----wwsx259f.xn--ddb152b7y23b; [B1 B6 V3 V5 V6]; [B1 B6 V3 V5 V6] # 4.𑍍ⴝ-.්ֵ ++B; 4.xn----t1g9869q.xn--ddb152b7y23b; [B1 B6 V3 V5 V6]; [B1 B6 V3 V5 V6] # 4.𑍍Ⴝ-.්ֵ ++B; ⒋𑍍ⴝ-.𞬪\u0DCA\u05B5; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ⒋𑍍ⴝ-.්ֵ ++B; xn----jcp487avl3w.xn--ddb152b7y23b; [B1 V3 V6]; [B1 V3 V6] # ⒋𑍍ⴝ-.්ֵ ++B; xn----t1g323mnk9t.xn--ddb152b7y23b; [B1 V3 V6]; [B1 V3 V6] # ⒋𑍍Ⴝ-.්ֵ ++B; 󞝃。򑆃񉢗--; [P1 V2 V3 V6]; [P1 V2 V3 V6] ++B; xn--2y75e.xn-----1l15eer88n; [V2 V3 V6]; [V2 V3 V6] ++T; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 B3 B6 V5] # ߟ.꯭ ++N; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 C1 C2] # ߟ.꯭ ++T; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 B3 B6 V5] # ߟ.꯭ ++N; \u200D\u07DF。\u200C\uABED; [B1 C1 C2]; [B1 C1 C2] # ߟ.꯭ ++B; xn--6sb.xn--429a; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ߟ.꯭ ++B; xn--6sb394j.xn--0ug1126c; [B1 C1 C2]; [B1 C1 C2] # ߟ.꯭ ++B; 𞮽\u07FF\u084E。ᢍ򝹁𐫘; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡎ.ᢍ𐫘 ++B; 𞮽\u07FF\u084E。ᢍ򝹁𐫘; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ࡎ.ᢍ𐫘 ++B; xn--3tb2nz468k.xn--69e8615j5rn5d; [B5 B6 V6]; [B5 B6 V6] # ࡎ.ᢍ𐫘 ++B; \u06ED𞺌𑄚\u1714.ꡞ\u08B7; [B1 B5 B6 V5]; [B1 B5 B6 V5] # ۭم𑄚᜔.ꡞࢷ ++B; \u06ED\u0645𑄚\u1714.ꡞ\u08B7; [B1 B5 B6 V5]; [B1 B5 B6 V5] # ۭم𑄚᜔.ꡞࢷ ++B; xn--hhb94ag41b739u.xn--dzb5582f; [B1 B5 B6 V5]; [B1 B5 B6 V5] # 
ۭم𑄚᜔.ꡞࢷ ++T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς ++N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς ++T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς ++N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς ++T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς ++N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς ++T; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς ++N; 񻂵킃𑘶\u07DC。ς\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.ςؼς ++B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ ++B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ ++B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ ++B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ ++B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ ++B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ ++B; xn--3sb7483hoyvbbe76g.xn--4xaa21q; [B5 B6 V6]; [B5 B6 V6] # 킃𑘶ߜ.σؼσ ++T; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++T; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++B; xn--3sb7483hoyvbbe76g.xn--3xab31q; [B5 B6 V6]; [B5 B6 V6] # 킃𑘶ߜ.σؼς ++B; xn--3sb7483hoyvbbe76g.xn--3xaa51q; [B5 B6 V6]; [B5 B6 V6] # 킃𑘶ߜ.ςؼς ++B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ ++B; 񻂵킃𑘶\u07DC。Σ\u063CΣ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ ++B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ ++B; 񻂵킃𑘶\u07DC。σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ ++B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼσ ++B; 񻂵킃𑘶\u07DC。Σ\u063Cσ; [B5 B6 P1 V6]; [B5 
B6 P1 V6] # 킃𑘶ߜ.σؼσ ++T; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++T; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++N; 񻂵킃𑘶\u07DC。Σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++T; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++N; 񻂵킃𑘶\u07DC。σ\u063Cς; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 킃𑘶ߜ.σؼς ++B; 蔰。󠁹\u08DD-𑈵; [P1 V6]; [P1 V6] # 蔰.ࣝ-𑈵 ++B; xn--sz1a.xn----mrd9984r3dl0i; [V6]; [V6] # 蔰.ࣝ-𑈵 ++T; ςჅ。\u075A; [P1 V6]; [P1 V6] # ςჅ.ݚ ++N; ςჅ。\u075A; [P1 V6]; [P1 V6] # ςჅ.ݚ ++T; ςⴥ。\u075A; ςⴥ.\u075A; xn--4xa203s.xn--epb # ςⴥ.ݚ ++N; ςⴥ。\u075A; ςⴥ.\u075A; xn--3xa403s.xn--epb # ςⴥ.ݚ ++B; ΣჅ。\u075A; [P1 V6]; [P1 V6] # σჅ.ݚ ++B; σⴥ。\u075A; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ ++B; Σⴥ。\u075A; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ ++B; xn--4xa203s.xn--epb; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ ++B; σⴥ.\u075A; ; xn--4xa203s.xn--epb # σⴥ.ݚ ++B; ΣჅ.\u075A; [P1 V6]; [P1 V6] # σჅ.ݚ ++B; Σⴥ.\u075A; σⴥ.\u075A; xn--4xa203s.xn--epb # σⴥ.ݚ ++B; xn--4xa477d.xn--epb; [V6]; [V6] # σჅ.ݚ ++B; xn--3xa403s.xn--epb; ςⴥ.\u075A; xn--3xa403s.xn--epb # ςⴥ.ݚ ++T; ςⴥ.\u075A; ; xn--4xa203s.xn--epb # ςⴥ.ݚ ++N; ςⴥ.\u075A; ; xn--3xa403s.xn--epb # ςⴥ.ݚ ++B; xn--3xa677d.xn--epb; [V6]; [V6] # ςჅ.ݚ ++B; \u0C4DႩ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్Ⴉ.᭲ ++B; \u0C4DႩ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్Ⴉ.᭲ ++B; \u0C4Dⴉ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్ⴉ.᭲ ++B; xn--lqc478nlr02a.xn--dwf; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ్ⴉ.᭲ ++B; xn--lqc64t7t26c.xn--dwf; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ్Ⴉ.᭲ ++B; \u0C4Dⴉ𞰓.\u1B72; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ్ⴉ.᭲ ++B; ⮷≮񎈴󠄟。𐠄; [B1 P1 V6]; [B1 P1 V6] ++B; ⮷<\u0338񎈴󠄟。𐠄; [B1 P1 V6]; [B1 P1 V6] ++B; xn--gdh877a3513h.xn--pc9c; [B1 V6]; [B1 V6] ++T; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 
C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++N; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ ++T; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++N; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ ++T; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++N; \u06BC。\u200Dẏ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ ++T; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++N; \u06BC。\u200Dy\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ ++T; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++N; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ ++T; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++N; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ ++B; xn--vkb.xn--08e172a; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++B; \u06BC.ẏᡤ; ; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++B; \u06BC.y\u0307ᡤ; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++B; \u06BC.Y\u0307ᡤ; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++B; \u06BC.Ẏᡤ; \u06BC.ẏᡤ; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++B; xn--vkb.xn--08e172ax6aca; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ ++T; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++N; \u06BC。\u200DY\u0307\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ ++T; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; xn--vkb.xn--08e172a # ڼ.ẏᡤ ++N; \u06BC。\u200DẎ\u200Cᡤ; [B1 C1 C2]; [B1 C1 C2] # ڼ.ẏᡤ ++B; 𐹹𑲛。񑂐\u0DCA; [B1 P1 V6]; [B1 P1 V6] # 𐹹𑲛.් ++B; xn--xo0dg5v.xn--h1c39876d; [B1 V6]; [B1 V6] # 𐹹𑲛.් ++B; -≠𑈵。嵕\uFEF1۴\uA953; [B1 B5 P1 V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ ++B; -=\u0338𑈵。嵕\uFEF1۴\uA953; [B1 B5 P1 V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ ++B; -≠𑈵。嵕\u064A۴\uA953; [B1 B5 P1 V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ ++B; -=\u0338𑈵。嵕\u064A۴\uA953; [B1 B5 P1 V3 V6]; [B1 B5 P1 V3 V6] # -≠𑈵.嵕ي۴꥓ ++B; xn----ufo4749h.xn--mhb45a235sns3c; [B1 B5 V3 V6]; [B1 B5 V3 V6] # -≠𑈵.嵕ي۴꥓ ++T; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ ++N; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 
P1 V6] # 𐹶ݮ.ہ≯ ++T; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ ++N; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # 𐹶ݮ.ہ≯ ++T; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ ++N; \u200C񍸰𐹶\u076E.\u06C1\u200D≯\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # 𐹶ݮ.ہ≯ ++T; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B3 B5 B6 P1 V6] # 𐹶ݮ.ہ≯ ++N; \u200C񍸰𐹶\u076E.\u06C1\u200D>\u0338\u200D; [B1 B3 C1 C2 P1 V6]; [B1 B3 C1 C2 P1 V6] # 𐹶ݮ.ہ≯ ++B; xn--ypb5875khz9y.xn--0kb682l; [B3 B5 B6 V6]; [B3 B5 B6 V6] # 𐹶ݮ.ہ≯ ++B; xn--ypb717jrx2o7v94a.xn--0kb660ka35v; [B1 B3 C1 C2 V6]; [B1 B3 C1 C2 V6] # 𐹶ݮ.ہ≯ ++B; ≮.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 ++B; <\u0338.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 ++B; ≮.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 ++B; <\u0338.\u17B5\u0855𐫔; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮.ࡕ𐫔 ++B; xn--gdh.xn--kwb589e217p; [B1 V5 V6]; [B1 V5 V6] # ≮.ࡕ𐫔 ++T; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.ႩႵ ++N; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [B3 C2 P1 V6] # 𐩗.ႩႵ ++T; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.ႩႵ ++N; 𐩗\u200D。ႩႵ; [B3 C2 P1 V6]; [B3 C2 P1 V6] # 𐩗.ႩႵ ++T; 𐩗\u200D。ⴉⴕ; [B3 C2]; xn--pt9c.xn--0kjya # 𐩗.ⴉⴕ ++N; 𐩗\u200D。ⴉⴕ; [B3 C2]; [B3 C2] # 𐩗.ⴉⴕ ++T; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.Ⴉⴕ ++N; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [B3 C2 P1 V6] # 𐩗.Ⴉⴕ ++B; xn--pt9c.xn--hnd666l; [V6]; [V6] ++B; xn--1ug4933g.xn--hnd666l; [B3 C2 V6]; [B3 C2 V6] # 𐩗.Ⴉⴕ ++B; xn--pt9c.xn--0kjya; 𐩗.ⴉⴕ; xn--pt9c.xn--0kjya; NV8 ++B; 𐩗.ⴉⴕ; ; xn--pt9c.xn--0kjya; NV8 ++B; 𐩗.ႩႵ; [P1 V6]; [P1 V6] ++B; 𐩗.Ⴉⴕ; [P1 V6]; [P1 V6] ++B; xn--pt9c.xn--hndy; [V6]; [V6] ++B; xn--1ug4933g.xn--0kjya; [B3 C2]; [B3 C2] # 𐩗.ⴉⴕ ++B; xn--1ug4933g.xn--hndy; [B3 C2 V6]; [B3 C2 V6] # 𐩗.ႩႵ ++T; 𐩗\u200D。ⴉⴕ; [B3 C2]; xn--pt9c.xn--0kjya # 𐩗.ⴉⴕ ++N; 𐩗\u200D。ⴉⴕ; [B3 C2]; [B3 C2] # 𐩗.ⴉⴕ ++T; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [P1 V6] # 𐩗.Ⴉⴕ ++N; 𐩗\u200D。Ⴉⴕ; [B3 C2 P1 V6]; [B3 C2 P1 
V6] # 𐩗.Ⴉⴕ ++T; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [P1 V5 V6] # ㄤ.̮ূ ++N; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ㄤ.̮ূ ++T; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [P1 V5 V6] # ㄤ.̮ূ ++N; \u200C\u200Cㄤ.\u032E󕨑\u09C2; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ㄤ.̮ূ ++B; xn--1fk.xn--vta284a9o563a; [V5 V6]; [V5 V6] # ㄤ.̮ূ ++B; xn--0uga242k.xn--vta284a9o563a; [C1 V5 V6]; [C1 V5 V6] # ㄤ.̮ূ ++T; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𐋻.-𐫄Ⴗ ++N; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𐋻.-𐫄Ⴗ ++T; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𐋻.-𐫄Ⴗ ++N; 𐋻。-\u200C𐫄Ⴗ; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𐋻.-𐫄Ⴗ ++T; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 V3] # 𐋻.-𐫄ⴗ ++N; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 C1 V3] # 𐋻.-𐫄ⴗ ++B; xn--v97c.xn----lws0526f; [B1 V3]; [B1 V3] ++B; xn--v97c.xn----sgnv20du99s; [B1 C1 V3]; [B1 C1 V3] # 𐋻.-𐫄ⴗ ++B; xn--v97c.xn----i1g2513q; [B1 V3 V6]; [B1 V3 V6] ++B; xn--v97c.xn----i1g888ih12u; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𐋻.-𐫄Ⴗ ++T; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 V3] # 𐋻.-𐫄ⴗ ++N; 𐋻。-\u200C𐫄ⴗ; [B1 C1 V3]; [B1 C1 V3] # 𐋻.-𐫄ⴗ ++T; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ ++N; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 🙑.≠ ++T; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ ++N; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 🙑.≠ ++T; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ ++N; 🙑𐷺.≠\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 🙑.≠ ++T; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 V6]; [B1 P1 V6] # 🙑.≠ ++N; 🙑𐷺.=\u0338\u200C; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 🙑.≠ ++B; xn--bl0dh970b.xn--1ch; [B1 V6]; [B1 V6] ++B; xn--bl0dh970b.xn--0ug83g; [B1 C1 V6]; [B1 C1 V6] # 🙑.≠ ++B; \u064C\u1CD2。𞮞\u2D7F⧎; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ٌ᳒.⵿⧎ ++B; \u064C\u1CD2。𞮞\u2D7F⧎; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ٌ᳒.⵿⧎ ++B; xn--ohb646i.xn--ewi38jf765c; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ٌ᳒.⵿⧎ ++B; Ⴔ𝨨₃󠁦.𝟳𑂹\u0B82; [P1 V6]; [P1 V6] # Ⴔ𝨨3.7𑂹ஂ ++B; Ⴔ𝨨3󠁦.7𑂹\u0B82; [P1 V6]; [P1 V6] # Ⴔ𝨨3.7𑂹ஂ ++B; ⴔ𝨨3󠁦.7𑂹\u0B82; [P1 V6]; [P1 V6] # ⴔ𝨨3.7𑂹ஂ ++B; 
xn--3-ews6985n35s3g.xn--7-cve6271r; [V6]; [V6] # ⴔ𝨨3.7𑂹ஂ ++B; xn--3-b1g83426a35t0g.xn--7-cve6271r; [V6]; [V6] # Ⴔ𝨨3.7𑂹ஂ ++B; ⴔ𝨨₃󠁦.𝟳𑂹\u0B82; [P1 V6]; [P1 V6] # ⴔ𝨨3.7𑂹ஂ ++T; 䏈\u200C。\u200C⒈񱢕; [C1 P1 V6]; [P1 V6] # 䏈.⒈ ++N; 䏈\u200C。\u200C⒈񱢕; [C1 P1 V6]; [C1 P1 V6] # 䏈.⒈ ++T; 䏈\u200C。\u200C1.񱢕; [C1 P1 V6]; [P1 V6] # 䏈.1. ++N; 䏈\u200C。\u200C1.񱢕; [C1 P1 V6]; [C1 P1 V6] # 䏈.1. ++B; xn--eco.1.xn--ms39a; [V6]; [V6] ++B; xn--0ug491l.xn--1-rgn.xn--ms39a; [C1 V6]; [C1 V6] # 䏈.1. ++B; xn--eco.xn--tsh21126d; [V6]; [V6] ++B; xn--0ug491l.xn--0ug88oot66q; [C1 V6]; [C1 V6] # 䏈.⒈ ++T; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ ++N; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ ++T; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ ++N; 1\uAAF6ß𑲥。\u1DD8; [V5]; [V5] # 1꫶ß𑲥.ᷘ ++B; 1\uAAF6SS𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ ++B; 1\uAAF6ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ ++B; 1\uAAF6Ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ ++B; xn--1ss-ir6ln166b.xn--weg; [V5]; [V5] # 1꫶ss𑲥.ᷘ ++B; xn--1-qfa2471kdb0d.xn--weg; [V5]; [V5] # 1꫶ß𑲥.ᷘ ++B; 1\uAAF6SS𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ ++B; 1\uAAF6ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ ++B; 1\uAAF6Ss𑲥。\u1DD8; [V5]; [V5] # 1꫶ss𑲥.ᷘ ++T; \u200D񫶩𞪯\u0CCD。\u077C⒈; [B1 C2 P1 V6]; [B5 B6 P1 V6] # ್.ݼ⒈ ++N; \u200D񫶩𞪯\u0CCD。\u077C⒈; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ್.ݼ⒈ ++T; \u200D񫶩𞪯\u0CCD。\u077C1.; [B1 C2 P1 V6]; [B5 B6 P1 V6] # ್.ݼ1. ++N; \u200D񫶩𞪯\u0CCD。\u077C1.; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ್.ݼ1. ++B; xn--8tc9875v5is1a.xn--1-g6c.; [B5 B6 V6]; [B5 B6 V6] # ್.ݼ1. ++B; xn--8tc969gzn94a4lm8a.xn--1-g6c.; [B1 C2 V6]; [B1 C2 V6] # ್.ݼ1. 
++B; xn--8tc9875v5is1a.xn--dqb689l; [B5 B6 V6]; [B5 B6 V6] # ್.ݼ⒈ ++B; xn--8tc969gzn94a4lm8a.xn--dqb689l; [B1 C2 V6]; [B1 C2 V6] # ್.ݼ⒈ ++B; \u1AB6.𞤳򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ ++B; \u1AB6.𞤳򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ ++B; \u1AB6.𞤑򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ ++B; xn--zqf.xn--ysb9657vuiz5bj0ep; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # ᪶.𞤳ߗ ++B; \u1AB6.𞤑򓢖򻉒\u07D7; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᪶.𞤳ߗ ++B; \u0842𞩚⒈.󠬌8򏳏\u0770; [B1 P1 V6]; [B1 P1 V6] # ࡂ⒈.8ݰ ++B; \u0842𞩚1..󠬌8򏳏\u0770; [B1 P1 V6 A4_2]; [B1 P1 V6 A4_2] # ࡂ1..8ݰ ++B; xn--1-rid26318a..xn--8-s5c22427ox454a; [B1 V6 A4_2]; [B1 V6 A4_2] # ࡂ1..8ݰ ++B; xn--0vb095ldg52a.xn--8-s5c22427ox454a; [B1 V6]; [B1 V6] # ࡂ⒈.8ݰ ++B; \u0361𐫫\u0369ᡷ。-󠰛鞰; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ͡𐫫ͩᡷ.-鞰 ++B; xn--cvaq482npv5t.xn----yg7dt1332g; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ͡𐫫ͩᡷ.-鞰 ++T; -.\u0ACD剘ß𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ß𐫃 ++N; -.\u0ACD剘ß𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ß𐫃 ++B; -.\u0ACD剘SS𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 ++B; -.\u0ACD剘ss𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 ++B; -.\u0ACD剘Ss𐫃; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 ++B; -.xn--ss-bqg4734erywk; [B1 V3 V5]; [B1 V3 V5] # -.્剘ss𐫃 ++B; -.xn--zca791c493duf8i; [B1 V3 V5]; [B1 V3 V5] # -.્剘ß𐫃 ++B; \u08FB𞵸。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ࣻ.- ++B; \u08FB𞵸。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ࣻ.- ++B; xn--b1b2719v.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ࣻ.- ++B; ⒈󠈻𐹲。≠\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # ⒈𐹲.≠𐹽 ++B; ⒈󠈻𐹲。=\u0338\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # ⒈𐹲.≠𐹽 ++B; 1.󠈻𐹲。≠\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # 1.𐹲.≠𐹽 ++B; 1.󠈻𐹲。=\u0338\u0603𐹽; [B1 P1 V6]; [B1 P1 V6] # 1.𐹲.≠𐹽 ++B; 1.xn--qo0dl3077c.xn--lfb536lb35n; [B1 V6]; [B1 V6] # 1.𐹲.≠𐹽 ++B; xn--tshw766f1153g.xn--lfb536lb35n; [B1 V6]; [B1 V6] # ⒈𐹲.≠𐹽 ++T; 𐹢󠈚Ⴎ\u200C.㖾𐹡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹢Ⴎ.㖾𐹡 ++N; 𐹢󠈚Ⴎ\u200C.㖾𐹡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹢Ⴎ.㖾𐹡 ++T; 𐹢󠈚ⴎ\u200C.㖾𐹡; [B1 
B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # 𐹢ⴎ.㖾𐹡 ++N; 𐹢󠈚ⴎ\u200C.㖾𐹡; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # 𐹢ⴎ.㖾𐹡 ++B; xn--5kjx323em053g.xn--pelu572d; [B1 B5 B6 V6]; [B1 B5 B6 V6] ++B; xn--0ug342clq0pqxv4i.xn--pelu572d; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐹢ⴎ.㖾𐹡 ++B; xn--mnd9001km0o0g.xn--pelu572d; [B1 B5 B6 V6]; [B1 B5 B6 V6] ++B; xn--mnd289ezj4pqxp0i.xn--pelu572d; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # 𐹢Ⴎ.㖾𐹡 ++B; 򩼗.\u07C7ᡖႳႧ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳႧ ++B; 򩼗.\u07C7ᡖႳႧ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳႧ ++B; 򩼗.\u07C7ᡖⴓⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖⴓⴇ ++B; 򩼗.\u07C7ᡖႳⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳⴇ ++B; xn--te28c.xn--isb286btrgo7w; [B2 B3 V6]; [B2 B3 V6] # .߇ᡖႳⴇ ++B; xn--te28c.xn--isb295fbtpmb; [B2 B3 V6]; [B2 B3 V6] # .߇ᡖⴓⴇ ++B; xn--te28c.xn--isb856b9a631d; [B2 B3 V6]; [B2 B3 V6] # .߇ᡖႳႧ ++B; 򩼗.\u07C7ᡖⴓⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖⴓⴇ ++B; 򩼗.\u07C7ᡖႳⴇ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # .߇ᡖႳⴇ ++T; \u200D􅍉.\u06B3\u0775; [B1 C2 P1 V6]; [P1 V6] # .ڳݵ ++N; \u200D􅍉.\u06B3\u0775; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .ڳݵ ++B; xn--3j78f.xn--mkb20b; [V6]; [V6] # .ڳݵ ++B; xn--1ug39444n.xn--mkb20b; [B1 C2 V6]; [B1 C2 V6] # .ڳݵ ++B; 𲤱⒛⾳.ꡦ⒈; [P1 V6]; [P1 V6] ++B; 𲤱20.音.ꡦ1.; [P1 V6]; [P1 V6] ++B; xn--20-9802c.xn--0w5a.xn--1-eg4e.; [V6]; [V6] ++B; xn--dth6033bzbvx.xn--tsh9439b; [V6]; [V6] ++B; \u07DC8񳦓-。򞲙𑁿𐩥\u09CD; [B2 B3 B5 B6 P1 V3 V6]; [B2 B3 B5 B6 P1 V3 V6] # ߜ8-.𑁿𐩥্ ++B; \u07DC8񳦓-。򞲙𑁿𐩥\u09CD; [B2 B3 B5 B6 P1 V3 V6]; [B2 B3 B5 B6 P1 V3 V6] # ߜ8-.𑁿𐩥্ ++B; xn--8--rve13079p.xn--b7b9842k42df776x; [B2 B3 B5 B6 V3 V6]; [B2 B3 B5 B6 V3 V6] # ߜ8-.𑁿𐩥্ ++T; Ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ß݅ ++N; Ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ß݅ ++T; Ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ß݅ ++N; Ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ß݅ ++T; ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ ++N; ⴕ。۰<\u0338ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ ++T; ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ ++N; ⴕ。۰≮ß\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ß݅ ++B; Ⴕ。۰≮SS\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ 
++B; Ⴕ。۰<\u0338SS\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ ++B; ⴕ。۰<\u0338ss\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ss݅ ++B; ⴕ。۰≮ss\u0745; [P1 V6]; [P1 V6] # ⴕ.۰≮ss݅ ++B; Ⴕ。۰≮Ss\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ ++B; Ⴕ。۰<\u0338Ss\u0745; [P1 V6]; [P1 V6] # Ⴕ.۰≮ss݅ ++B; xn--tnd.xn--ss-jbe65aw27i; [V6]; [V6] # Ⴕ.۰≮ss݅ ++B; xn--dlj.xn--ss-jbe65aw27i; [V6]; [V6] # ⴕ.۰≮ss݅ ++B; xn--dlj.xn--zca912alh227g; [V6]; [V6] # ⴕ.۰≮ß݅ ++B; xn--tnd.xn--zca912alh227g; [V6]; [V6] # Ⴕ.۰≮ß݅ ++B; \u07E9-.𝨗꒱\u1B72; [B1 B3 V3 V5]; [B1 B3 V3 V5] # ߩ-.𝨗꒱᭲ ++B; xn----odd.xn--dwf8994dc8wj; [B1 B3 V3 V5]; [B1 B3 V3 V5] # ߩ-.𝨗꒱᭲ ++T; 𞼸\u200C.≯䕵⫧; [B1 B3 C1 P1 V6]; [B1 P1 V6] # .≯䕵⫧ ++N; 𞼸\u200C.≯䕵⫧; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # .≯䕵⫧ ++T; 𞼸\u200C.>\u0338䕵⫧; [B1 B3 C1 P1 V6]; [B1 P1 V6] # .≯䕵⫧ ++N; 𞼸\u200C.>\u0338䕵⫧; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # .≯䕵⫧ ++B; xn--sn7h.xn--hdh754ax6w; [B1 V6]; [B1 V6] ++B; xn--0ugx453p.xn--hdh754ax6w; [B1 B3 C1 V6]; [B1 B3 C1 V6] # .≯䕵⫧ ++T; 𐨅ß\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ßيخ.ڬ۳︒ ++N; 𐨅ß\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ßيخ.ڬ۳︒ ++T; 𐨅ß\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ßيخ.ڬ۳. ++N; 𐨅ß\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ßيخ.ڬ۳. ++B; 𐨅SS\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. ++B; 𐨅ss\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. ++B; 𐨅Ss\u064A\u062E.\u06AC۳。; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. ++B; xn--ss-ytd5i7765l.xn--fkb6l.; [B1 V5]; [B1 V5] # 𐨅ssيخ.ڬ۳. ++B; xn--zca23yncs877j.xn--fkb6l.; [B1 V5]; [B1 V5] # 𐨅ßيخ.ڬ۳. 
++B; 𐨅SS\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ssيخ.ڬ۳︒ ++B; 𐨅ss\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ssيخ.ڬ۳︒ ++B; 𐨅Ss\uFC57.\u06AC۳︒; [B1 B3 P1 V5 V6]; [B1 B3 P1 V5 V6] # 𐨅ssيخ.ڬ۳︒ ++B; xn--ss-ytd5i7765l.xn--fkb6lp314e; [B1 B3 V5 V6]; [B1 B3 V5 V6] # 𐨅ssيخ.ڬ۳︒ ++B; xn--zca23yncs877j.xn--fkb6lp314e; [B1 B3 V5 V6]; [B1 B3 V5 V6] # 𐨅ßيخ.ڬ۳︒ ++B; -≮🡒\u1CED.񏿾Ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.Ⴁܔ ++B; -<\u0338🡒\u1CED.񏿾Ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.Ⴁܔ ++B; -<\u0338🡒\u1CED.񏿾ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.ⴁܔ ++B; -≮🡒\u1CED.񏿾ⴁ\u0714; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -≮🡒᳭.ⴁܔ ++B; xn----44l04zxt68c.xn--enb135qf106f; [B1 V3 V6]; [B1 V3 V6] # -≮🡒᳭.ⴁܔ ++B; xn----44l04zxt68c.xn--enb300c1597h; [B1 V3 V6]; [B1 V3 V6] # -≮🡒᳭.Ⴁܔ ++T; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ ++N; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ ++T; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ ++N; 𞤨。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ ++T; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ ++N; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ ++B; xn--ge6h.xn--oc9a; 𞤨.ꡏ; xn--ge6h.xn--oc9a ++B; 𞤨.ꡏ; ; xn--ge6h.xn--oc9a ++B; 𞤆.ꡏ; 𞤨.ꡏ; xn--ge6h.xn--oc9a ++B; xn--ge6h.xn--0ugb9575h; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ ++T; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; xn--ge6h.xn--oc9a # 𞤨.ꡏ ++N; 𞤆。ꡏ\u200D\u200C; [B6 C1 C2]; [B6 C1 C2] # 𞤨.ꡏ ++B; 󠅹𑂶.ᢌ𑂹\u0669; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𑂶.ᢌ𑂹٩ ++B; 󠅹𑂶.ᢌ𑂹\u0669; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𑂶.ᢌ𑂹٩ ++B; xn--b50d.xn--iib993gyp5p; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𑂶.ᢌ𑂹٩ ++B; Ⅎ󠅺񝵒。≯⾑; [P1 V6]; [P1 V6] ++B; Ⅎ󠅺񝵒。>\u0338⾑; [P1 V6]; [P1 V6] ++B; Ⅎ󠅺񝵒。≯襾; [P1 V6]; [P1 V6] ++B; Ⅎ󠅺񝵒。>\u0338襾; [P1 V6]; [P1 V6] ++B; ⅎ󠅺񝵒。>\u0338襾; [P1 V6]; [P1 V6] ++B; ⅎ󠅺񝵒。≯襾; [P1 V6]; [P1 V6] ++B; xn--73g39298c.xn--hdhz171b; [V6]; [V6] ++B; xn--f3g73398c.xn--hdhz171b; [V6]; [V6] ++B; ⅎ󠅺񝵒。>\u0338⾑; [P1 V6]; [P1 V6] ++B; ⅎ󠅺񝵒。≯⾑; [P1 V6]; [P1 V6] ++T; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 
V3] # ςු٠.- ++N; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # ςු٠.- ++T; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # ςු٠.- ++N; ς\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # ςු٠.- ++T; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- ++N; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- ++T; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- ++N; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- ++B; xn--4xa25ks2j.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # σු٠.- ++B; xn--4xa25ks2jenu.-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- ++B; xn--3xa45ks2jenu.-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # ςු٠.- ++T; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- ++N; Σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- ++T; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 V3] # σු٠.- ++N; σ\u200D\u0DD4\u0660。-; [B1 B5 B6 C2 V3]; [B1 B5 B6 C2 V3] # σු٠.- ++T; \u200C.ßႩ-; [C1 P1 V3 V6]; [P1 V3 V6 A4_2] # .ßႩ- ++N; \u200C.ßႩ-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .ßႩ- ++T; \u200C.ßⴉ-; [C1 V3]; [V3 A4_2] # .ßⴉ- ++N; \u200C.ßⴉ-; [C1 V3]; [C1 V3] # .ßⴉ- ++T; \u200C.SSႩ-; [C1 P1 V3 V6]; [P1 V3 V6 A4_2] # .ssႩ- ++N; \u200C.SSႩ-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # .ssႩ- ++T; \u200C.ssⴉ-; [C1 V3]; [V3 A4_2] # .ssⴉ- ++N; \u200C.ssⴉ-; [C1 V3]; [C1 V3] # .ssⴉ- ++T; \u200C.Ssⴉ-; [C1 V3]; [V3 A4_2] # .ssⴉ- ++N; \u200C.Ssⴉ-; [C1 V3]; [C1 V3] # .ssⴉ- ++B; .xn--ss--bi1b; [V3 A4_2]; [V3 A4_2] ++B; xn--0ug.xn--ss--bi1b; [C1 V3]; [C1 V3] # .ssⴉ- ++B; .xn--ss--4rn; [V3 V6 A4_2]; [V3 V6 A4_2] ++B; xn--0ug.xn--ss--4rn; [C1 V3 V6]; [C1 V3 V6] # .ssႩ- ++B; xn--0ug.xn----pfa2305a; [C1 V3]; [C1 V3] # .ßⴉ- ++B; xn--0ug.xn----pfa042j; [C1 V3 V6]; [C1 V3 V6] # .ßႩ- ++B; 󍭲𐫍㓱。⾑; [B5 P1 V6]; [B5 P1 V6] ++B; 󍭲𐫍㓱。襾; [B5 P1 V6]; [B5 P1 V6] ++B; xn--u7kt691dlj09f.xn--9v2a; [B5 V6]; [B5 V6] ++T; \u06A0𐮋𐹰≮。≯󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 P1 V6] # ڠ𐮋𐹰≮.≯ ++N; \u06A0𐮋𐹰≮。≯󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 C2 
P1 V6] # ڠ𐮋𐹰≮.≯ ++T; \u06A0𐮋𐹰<\u0338。>\u0338󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 P1 V6] # ڠ𐮋𐹰≮.≯ ++N; \u06A0𐮋𐹰<\u0338。>\u0338󠦗\u200D; [B1 B3 C2 P1 V6]; [B1 B3 C2 P1 V6] # ڠ𐮋𐹰≮.≯ ++B; xn--2jb053lf13nyoc.xn--hdh08821l; [B1 B3 V6]; [B1 B3 V6] # ڠ𐮋𐹰≮.≯ ++B; xn--2jb053lf13nyoc.xn--1ugx6gc8096c; [B1 B3 C2 V6]; [B1 B3 C2 V6] # ڠ𐮋𐹰≮.≯ ++B; 𝟞。񃰶\u0777\u08B0⩋; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 6.ݷࢰ⩋ ++B; 6。񃰶\u0777\u08B0⩋; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 6.ݷࢰ⩋ ++B; 6.xn--7pb04do15eq748f; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 6.ݷࢰ⩋ ++B; -\uFCFD。𑇀𑍴; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 ++B; -\uFCFD。𑇀𑍴; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 ++B; -\u0634\u0649。𑇀𑍴; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 ++B; xn----qnc7d.xn--wd1d62a; [B1 V3 V5]; [B1 V3 V5] # -شى.𑇀𑍴 ++T; \u200C󠊶𝟏.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 1.ൃ𐹬 ++N; \u200C󠊶𝟏.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 1.ൃ𐹬 ++T; \u200C󠊶1.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 1.ൃ𐹬 ++N; \u200C󠊶1.\u0D43򪥐𐹬󊓶; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 1.ൃ𐹬 ++B; xn--1-f521m.xn--mxc0872kcu37dnmem; [B1 V5 V6]; [B1 V5 V6] # 1.ൃ𐹬 ++B; xn--1-rgnu0071n.xn--mxc0872kcu37dnmem; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 1.ൃ𐹬 ++T; 齙--𝟰.ß; 齙--4.ß; xn----4-p16k.ss ++N; 齙--𝟰.ß; 齙--4.ß; xn----4-p16k.xn--zca ++T; 齙--4.ß; ; xn----4-p16k.ss ++N; 齙--4.ß; ; xn----4-p16k.xn--zca ++B; 齙--4.SS; 齙--4.ss; xn----4-p16k.ss ++B; 齙--4.ss; ; xn----4-p16k.ss ++B; 齙--4.Ss; 齙--4.ss; xn----4-p16k.ss ++B; xn----4-p16k.ss; 齙--4.ss; xn----4-p16k.ss ++B; xn----4-p16k.xn--zca; 齙--4.ß; xn----4-p16k.xn--zca ++B; 齙--𝟰.SS; 齙--4.ss; xn----4-p16k.ss ++B; 齙--𝟰.ss; 齙--4.ss; xn----4-p16k.ss ++B; 齙--𝟰.Ss; 齙--4.ss; xn----4-p16k.ss ++T; \u1BF2.𐹢𞀖\u200C; [B1 C1 V5]; [B1 V5] # ᯲.𐹢𞀖 ++N; \u1BF2.𐹢𞀖\u200C; [B1 C1 V5]; [B1 C1 V5] # ᯲.𐹢𞀖 ++B; xn--0zf.xn--9n0d2296a; [B1 V5]; [B1 V5] # ᯲.𐹢𞀖 ++B; xn--0zf.xn--0ug9894grqqf; [B1 C1 V5]; [B1 C1 V5] # ᯲.𐹢𞀖 ++T; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- ++N; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- ++T; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # 
.- ++N; 󃲙󠋘。\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- ++B; xn--ct86d8w51a.\uDEDE-; [P1 V3 V6]; [P1 V3 V6 A3] # .- ++B; XN--CT86D8W51A.\uDEDE-; [P1 V3 V6]; [P1 V3 V6 A3] # .- ++B; Xn--Ct86d8w51a.\uDEDE-; [P1 V3 V6]; [P1 V3 V6 A3] # .- ++T; xn--ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- ++N; xn--ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- ++T; XN--CT86D8W51A.\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- ++N; XN--CT86D8W51A.\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- ++T; Xn--Ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [P1 V3 V6 A3] # .- ++N; Xn--Ct86d8w51a.\uDEDE-\u200D; [C2 P1 V6]; [C2 P1 V6 A3] # .- ++B; \u1A60.𞵷-𝪩悎; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᩠.-𝪩悎 ++B; \u1A60.𞵷-𝪩悎; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ᩠.-𝪩悎 ++B; xn--jof.xn----gf4bq282iezpa; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # ᩠.-𝪩悎 ++B; 𛜯󠊛.𞤳񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] ++B; 𛜯󠊛.𞤳񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] ++B; 𛜯󠊛.𞤑񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] ++B; xn--xx5gy2741c.xn--re6hw266j; [B2 B3 B6 V6]; [B2 B3 B6 V6] ++B; 𛜯󠊛.𞤑񏥾; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] ++B; \u071C𐫒\u062E.𐋲; [B1]; [B1] # ܜ𐫒خ.𐋲 ++B; xn--tgb98b8643d.xn--m97c; [B1]; [B1] # ܜ𐫒خ.𐋲 ++B; 𐼑𞤓\u0637\u08E2.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. ++B; 𐼑𞤵\u0637\u08E2.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. ++B; xn--2gb08k9w69agm0g.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. ++B; XN--2GB08K9W69AGM0G.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. ++B; Xn--2Gb08k9w69agm0g.\uDF56; [P1 V6]; [P1 V6 A3] # 𞤵ط. 
++B; Ↄ。\u0A4D\u1CD4𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # Ↄ.᳔੍ ++B; Ↄ。\u1CD4\u0A4D𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # Ↄ.᳔੍ ++B; ↄ。\u1CD4\u0A4D𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ↄ.᳔੍ ++B; xn--r5g.xn--ybc995g0835a; [B1 V5 V6]; [B1 V5 V6] # ↄ.᳔੍ ++B; xn--q5g.xn--ybc995g0835a; [B1 V5 V6]; [B1 V5 V6] # Ↄ.᳔੍ ++B; ↄ。\u0A4D\u1CD4𞷣; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ↄ.᳔੍ ++B; 󠪢-。򛂏≮𑜫; [P1 V3 V6]; [P1 V3 V6] ++B; 󠪢-。򛂏<\u0338𑜫; [P1 V3 V6]; [P1 V3 V6] ++B; xn----bh61m.xn--gdhz157g0em1d; [V3 V6]; [V3 V6] ++T; \u200C󠉹\u200D。򌿧≮Ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮Ⴉ ++N; \u200C󠉹\u200D。򌿧≮Ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮Ⴉ ++T; \u200C󠉹\u200D。򌿧<\u0338Ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮Ⴉ ++N; \u200C󠉹\u200D。򌿧<\u0338Ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮Ⴉ ++T; \u200C󠉹\u200D。򌿧<\u0338ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮ⴉ ++N; \u200C󠉹\u200D。򌿧<\u0338ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮ⴉ ++T; \u200C󠉹\u200D。򌿧≮ⴉ; [C1 C2 P1 V6]; [P1 V6] # .≮ⴉ ++N; \u200C󠉹\u200D。򌿧≮ⴉ; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .≮ⴉ ++B; xn--3n36e.xn--gdh992byu01p; [V6]; [V6] ++B; xn--0ugc90904y.xn--gdh992byu01p; [C1 C2 V6]; [C1 C2 V6] # .≮ⴉ ++B; xn--3n36e.xn--hnd112gpz83n; [V6]; [V6] ++B; xn--0ugc90904y.xn--hnd112gpz83n; [C1 C2 V6]; [C1 C2 V6] # .≮Ⴉ ++B; 𐹯-𑄴\u08BC。︒䖐⾆; [B1 P1 V6]; [B1 P1 V6] # 𐹯-𑄴ࢼ.︒䖐舌 ++B; 𐹯-𑄴\u08BC。。䖐舌; [B1 A4_2]; [B1 A4_2] # 𐹯-𑄴ࢼ..䖐舌 ++B; xn----rpd7902rclc..xn--fpo216m; [B1 A4_2]; [B1 A4_2] # 𐹯-𑄴ࢼ..䖐舌 ++B; xn----rpd7902rclc.xn--fpo216mn07e; [B1 V6]; [B1 V6] # 𐹯-𑄴ࢼ.︒䖐舌 ++B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] ++B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] ++B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] ++B; 𝪞Ⴐ。쪡; [P1 V5 V6]; [P1 V5 V6] ++B; 𝪞ⴐ。쪡; [V5]; [V5] ++B; 𝪞ⴐ。쪡; [V5]; [V5] ++B; xn--7kj1858k.xn--pi6b; [V5]; [V5] ++B; xn--ond3755u.xn--pi6b; [V5 V6]; [V5 V6] ++B; 𝪞ⴐ。쪡; [V5]; [V5] ++B; 𝪞ⴐ。쪡; [V5]; [V5] ++B; \u0E3A쩁𐹬.􋉳; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ฺ쩁𐹬. ++B; \u0E3A쩁𐹬.􋉳; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ฺ쩁𐹬. ++B; xn--o4c4837g2zvb.xn--5f70g; [B1 V5 V6]; [B1 V5 V6] # ฺ쩁𐹬. 
++T; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [P1 V6] # ᡅ0.⎢ ++N; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [C1 P1 V6] # ᡅ0.⎢ ++T; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [P1 V6] # ᡅ0.⎢ ++N; ᡅ0\u200C。⎢󤨄; [C1 P1 V6]; [C1 P1 V6] # ᡅ0.⎢ ++B; xn--0-z6j.xn--8lh28773l; [V6]; [V6] ++B; xn--0-z6jy93b.xn--8lh28773l; [C1 V6]; [C1 V6] # ᡅ0.⎢ ++T; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ß ++N; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ß ++T; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ß ++N; 𲮚9ꍩ\u17D3.\u200Dß; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ß ++T; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss ++N; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss ++T; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss ++N; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss ++T; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss ++N; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss ++B; xn--9-i0j5967eg3qz.ss; [V6]; [V6] # 9ꍩ៓.ss ++B; xn--9-i0j5967eg3qz.xn--ss-l1t; [C2 V6]; [C2 V6] # 9ꍩ៓.ss ++B; xn--9-i0j5967eg3qz.xn--zca770n; [C2 V6]; [C2 V6] # 9ꍩ៓.ß ++T; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss ++N; 𲮚9ꍩ\u17D3.\u200DSS; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss ++T; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss ++N; 𲮚9ꍩ\u17D3.\u200Dss; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss ++T; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [P1 V6] # 9ꍩ៓.ss ++N; 𲮚9ꍩ\u17D3.\u200DSs; [C2 P1 V6]; [C2 P1 V6] # 9ꍩ៓.ss ++B; ꗷ𑆀.\u075D𐩒; ; xn--ju8a625r.xn--hpb0073k; NV8 # ꗷ𑆀.ݝ𐩒 ++B; xn--ju8a625r.xn--hpb0073k; ꗷ𑆀.\u075D𐩒; xn--ju8a625r.xn--hpb0073k; NV8 # ꗷ𑆀.ݝ𐩒 ++B; ⒐≯-。︒򩑣-񞛠; [P1 V3 V6]; [P1 V3 V6] ++B; ⒐>\u0338-。︒򩑣-񞛠; [P1 V3 V6]; [P1 V3 V6] ++B; 9.≯-。。򩑣-񞛠; [P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] ++B; 9.>\u0338-。。򩑣-񞛠; [P1 V3 V6 A4_2]; [P1 V3 V6 A4_2] ++B; 9.xn----ogo..xn----xj54d1s69k; [V3 V6 A4_2]; [V3 V6 A4_2] ++B; xn----ogot9g.xn----n89hl0522az9u2a; [V3 V6]; [V3 V6] ++B; 򈪚\u0CE3Ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣჁ. ++B; 򈪚\u0CE3Ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣჁ. ++B; 򈪚\u0CE3ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣⴡ. ++B; xn--vuc226n8n28lmju7a.xn--cgb; [B6 V6]; [B6 V6] # ೣⴡ. 
++B; xn--vuc49qvu85xmju7a.xn--cgb; [B6 V6]; [B6 V6] # ೣჁ. ++B; 򈪚\u0CE3ⴡ󠢏.\u061D; [B6 P1 V6]; [B6 P1 V6] # ೣⴡ. ++B; \u1DEB。𐋩\u0638-𐫮; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᷫ.𐋩ظ-𐫮 ++B; xn--gfg.xn----xnc0815qyyg; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ᷫ.𐋩ظ-𐫮 ++B; 싇。⾇𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] ++B; 싇。⾇𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] ++B; 싇。舛𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] ++B; 싇。舛𐳋Ⴝ; [B5 P1 V6]; [B5 P1 V6] ++B; 싇。舛𐳋ⴝ; [B5]; [B5] ++B; 싇。舛𐳋ⴝ; [B5]; [B5] ++B; 싇。舛𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] ++B; 싇。舛𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] ++B; 싇。舛𐲋ⴝ; [B5]; [B5] ++B; 싇。舛𐲋ⴝ; [B5]; [B5] ++B; xn--9u4b.xn--llj123yh74e; [B5]; [B5] ++B; xn--9u4b.xn--1nd7519ch79d; [B5 V6]; [B5 V6] ++B; 싇。⾇𐳋ⴝ; [B5]; [B5] ++B; 싇。⾇𐳋ⴝ; [B5]; [B5] ++B; 싇。⾇𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] ++B; 싇。⾇𐲋Ⴝ; [B5 P1 V6]; [B5 P1 V6] ++B; 싇。⾇𐲋ⴝ; [B5]; [B5] ++B; 싇。⾇𐲋ⴝ; [B5]; [B5] ++T; 𐹠ς。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠ς.ڿჀ ++N; 𐹠ς。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹠ς.ڿჀ ++T; 𐹠ς。\u200C\u06BFⴠ; [B1 C1]; [B1 B2 B3] # 𐹠ς.ڿⴠ ++N; 𐹠ς。\u200C\u06BFⴠ; [B1 C1]; [B1 C1] # 𐹠ς.ڿⴠ ++T; 𐹠Σ。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 B2 B3 P1 V6] # 𐹠σ.ڿჀ ++N; 𐹠Σ。\u200C\u06BFჀ; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 𐹠σ.ڿჀ ++T; 𐹠σ。\u200C\u06BFⴠ; [B1 C1]; [B1 B2 B3] # 𐹠σ.ڿⴠ ++N; 𐹠σ。\u200C\u06BFⴠ; [B1 C1]; [B1 C1] # 𐹠σ.ڿⴠ ++B; xn--4xa9167k.xn--ykb467q; [B1 B2 B3]; [B1 B2 B3] # 𐹠σ.ڿⴠ ++B; xn--4xa9167k.xn--ykb760k9hj; [B1 C1]; [B1 C1] # 𐹠σ.ڿⴠ ++B; xn--4xa9167k.xn--ykb632c; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐹠σ.ڿჀ ++B; xn--4xa9167k.xn--ykb632cvxm; [B1 C1 V6]; [B1 C1 V6] # 𐹠σ.ڿჀ ++B; xn--3xa1267k.xn--ykb760k9hj; [B1 C1]; [B1 C1] # 𐹠ς.ڿⴠ ++B; xn--3xa1267k.xn--ykb632cvxm; [B1 C1 V6]; [B1 C1 V6] # 𐹠ς.ڿჀ ++T; 򇒐\u200C\u0604.\u069A-ß; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ß ++N; 򇒐\u200C\u0604.\u069A-ß; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ß ++T; 򇒐\u200C\u0604.\u069A-SS; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ss ++N; 򇒐\u200C\u0604.\u069A-SS; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ss ++T; 򇒐\u200C\u0604.\u069A-ss; [B2 B3 B5 B6 C1 P1 
V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ss ++N; 򇒐\u200C\u0604.\u069A-ss; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ss ++T; 򇒐\u200C\u0604.\u069A-Ss; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # .ښ-ss ++N; 򇒐\u200C\u0604.\u069A-Ss; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # .ښ-ss ++B; xn--mfb98261i.xn---ss-sdf; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # .ښ-ss ++B; xn--mfb144kqo32m.xn---ss-sdf; [B2 B3 B5 B6 C1 V6]; [B2 B3 B5 B6 C1 V6] # .ښ-ss ++B; xn--mfb144kqo32m.xn----qfa315b; [B2 B3 B5 B6 C1 V6]; [B2 B3 B5 B6 C1 V6] # .ښ-ß ++T; \u200C\u200D\u17B5\u067A.-\uFBB0󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ٺ.-ۓ ++N; \u200C\u200D\u17B5\u067A.-\uFBB0󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # ٺ.-ۓ ++T; \u200C\u200D\u17B5\u067A.-\u06D3󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ٺ.-ۓ ++N; \u200C\u200D\u17B5\u067A.-\u06D3󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # ٺ.-ۓ ++T; \u200C\u200D\u17B5\u067A.-\u06D2\u0654󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ٺ.-ۓ ++N; \u200C\u200D\u17B5\u067A.-\u06D2\u0654󅄞𐸚; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # ٺ.-ۓ ++B; xn--zib539f.xn----twc1133r17r6g; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ٺ.-ۓ ++B; xn--zib539f8igea.xn----twc1133r17r6g; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # ٺ.-ۓ ++B; 򡶱。𐮬≠; [B3 P1 V6]; [B3 P1 V6] ++B; 򡶱。𐮬=\u0338; [B3 P1 V6]; [B3 P1 V6] ++B; 򡶱。𐮬≠; [B3 P1 V6]; [B3 P1 V6] ++B; 򡶱。𐮬=\u0338; [B3 P1 V6]; [B3 P1 V6] ++B; xn--dd55c.xn--1ch3003g; [B3 V6]; [B3 V6] ++B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ ++B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ ++B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ ++B; \u0FB2𞶅。𐹮𐹷덝۵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ྲ.𐹮𐹷덝۵ ++B; xn--fgd0675v.xn--imb5839fidpcbba; [B1 V5 V6]; [B1 V5 V6] # ྲ.𐹮𐹷덝۵ ++T; Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [P1 V3 V6] # Ⴏ-.Ⴉ ++N; Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [C2 P1 V3 V6] # Ⴏ-.Ⴉ ++T; Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [P1 V3 V6] # Ⴏ-.Ⴉ ++N; Ⴏ󠅋-.\u200DႩ; [C2 P1 V3 V6]; [C2 P1 V3 V6] # Ⴏ-.Ⴉ ++T; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [V3] # ⴏ-.ⴉ ++N; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [C2 
V3] # ⴏ-.ⴉ ++B; xn----3vs.xn--0kj; [V3]; [V3] ++B; xn----3vs.xn--1ug532c; [C2 V3]; [C2 V3] # ⴏ-.ⴉ ++B; xn----00g.xn--hnd; [V3 V6]; [V3 V6] ++B; xn----00g.xn--hnd399e; [C2 V3 V6]; [C2 V3 V6] # Ⴏ-.Ⴉ ++T; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [V3] # ⴏ-.ⴉ ++N; ⴏ󠅋-.\u200Dⴉ; [C2 V3]; [C2 V3] # ⴏ-.ⴉ ++B; ⇧𐨏󠾈󯶅。\u0600󠈵󠆉; [B1 P1 V6]; [B1 P1 V6] # ⇧𐨏. ++B; xn--l8g5552g64t4g46xf.xn--ifb08144p; [B1 V6]; [B1 V6] # ⇧𐨏. ++B; ≠𐮂.↑🄇⒈; [B1 P1 V6]; [B1 P1 V6] ++B; =\u0338𐮂.↑🄇⒈; [B1 P1 V6]; [B1 P1 V6] ++B; ≠𐮂.↑6,1.; [B1 P1 V6]; [B1 P1 V6] ++B; =\u0338𐮂.↑6,1.; [B1 P1 V6]; [B1 P1 V6] ++B; xn--1chy492g.xn--6,1-pw1a.; [B1 P1 V6]; [B1 P1 V6] ++B; xn--1chy492g.xn--45gx9iuy44d; [B1 V6]; [B1 V6] ++T; 𝩏󠲉ß.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ß.ᢤ𐹫 ++N; 𝩏󠲉ß.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ß.ᢤ𐹫 ++T; 𝩏󠲉SS.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ss.ᢤ𐹫 ++N; 𝩏󠲉SS.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ss.ᢤ𐹫 ++T; 𝩏󠲉ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ss.ᢤ𐹫 ++N; 𝩏󠲉ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ss.ᢤ𐹫 ++T; 𝩏󠲉Ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𝩏ss.ᢤ𐹫 ++N; 𝩏󠲉Ss.ᢤ򄦌\u200C𐹫; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # 𝩏ss.ᢤ𐹫 ++B; xn--ss-zb11ap1427e.xn--ubf2596jbt61c; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] ++B; xn--ss-zb11ap1427e.xn--ubf609atw1tynn3d; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # 𝩏ss.ᢤ𐹫 ++B; xn--zca3153vupz3e.xn--ubf609atw1tynn3d; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # 𝩏ß.ᢤ𐹫 ++T; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ ++N; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ ++T; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ ++N; ß𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßႧ.ꙺ ++T; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ ++N; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ ++B; SS𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 
V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ ++B; ss𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssⴇ.ꙺ ++B; Ss𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ ++B; xn--ss-rek7420r4hs7b.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ssႧ.ꙺ ++B; xn--ss-e61ar955h4hs7b.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ssⴇ.ꙺ ++B; xn--zca227tpy4lkns1b.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ßⴇ.ꙺ ++B; xn--zca491fci5qkn79a.xn--9x8a; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # ßႧ.ꙺ ++T; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ ++N; ß𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ßⴇ.ꙺ ++B; SS𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ ++B; ss𐵳񗘁ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssⴇ.ꙺ ++B; Ss𐵳񗘁Ⴇ。\uA67A; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # ssႧ.ꙺ ++B; \u1714。󠆣-𑋪; [V3 V5]; [V3 V5] # ᜔.-𑋪 ++B; xn--fze.xn----ly8i; [V3 V5]; [V3 V5] # ᜔.-𑋪 ++T; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß ++N; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß ++T; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß ++N; \uABE8-.򨏜\u05BDß; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽß ++B; \uABE8-.򨏜\u05BDSS; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss ++B; \uABE8-.򨏜\u05BDss; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss ++B; \uABE8-.򨏜\u05BDSs; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss ++B; xn----pw5e.xn--ss-7jd10716y; [V3 V5 V6]; [V3 V5 V6] # ꯨ-.ֽss ++B; xn----pw5e.xn--zca50wfv060a; [V3 V5 V6]; [V3 V5 V6] # ꯨ-.ֽß ++B; \uABE8-.򨏜\u05BDSS; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss ++B; \uABE8-.򨏜\u05BDss; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss ++B; \uABE8-.򨏜\u05BDSs; [P1 V3 V5 V6]; [P1 V3 V5 V6] # ꯨ-.ֽss ++B; ᡓ-≮。\u066B󠅱ᡄ; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᡓ-≮.٫ᡄ ++B; ᡓ-<\u0338。\u066B󠅱ᡄ; [B1 B6 P1 V6]; [B1 B6 P1 V6] # ᡓ-≮.٫ᡄ ++B; xn----s7j866c.xn--kib252g; [B1 B6 V6]; [B1 B6 V6] # ᡓ-≮.٫ᡄ ++B; 𝟥♮𑜫\u08ED.\u17D2𑜫8󠆏; [V5]; [V5] # 3♮𑜫࣭.្𑜫8 ++B; 3♮𑜫\u08ED.\u17D2𑜫8󠆏; [V5]; [V5] # 
3♮𑜫࣭.្𑜫8 ++B; xn--3-ksd277tlo7s.xn--8-f0jx021l; [V5]; [V5] # 3♮𑜫࣭.្𑜫8 ++T; -。򕌀\u200D❡; [C2 P1 V3 V6]; [P1 V3 V6] # -.❡ ++N; -。򕌀\u200D❡; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -.❡ ++T; -。򕌀\u200D❡; [C2 P1 V3 V6]; [P1 V3 V6] # -.❡ ++N; -。򕌀\u200D❡; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -.❡ ++B; -.xn--nei54421f; [V3 V6]; [V3 V6] ++B; -.xn--1ug800aq795s; [C2 V3 V6]; [C2 V3 V6] # -.❡ ++B; 𝟓☱𝟐򥰵。𝪮񐡳; [P1 V5 V6]; [P1 V5 V6] ++B; 5☱2򥰵。𝪮񐡳; [P1 V5 V6]; [P1 V5 V6] ++B; xn--52-dwx47758j.xn--kd3hk431k; [V5 V6]; [V5 V6] ++B; -.-├򖦣; [P1 V3 V6]; [P1 V3 V6] ++B; -.xn----ukp70432h; [V3 V6]; [V3 V6] ++T; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ֥ݭ. ++N; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ֥ݭ. ++T; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ֥ݭ. ++N; \u05A5\u076D。\u200D󠀘; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ֥ݭ. ++B; xn--wcb62g.xn--p526e; [B1 V5 V6]; [B1 V5 V6] # ֥ݭ. ++B; xn--wcb62g.xn--1ugy8001l; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ֥ݭ. ++T; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 ++N; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 ++T; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 ++N; 쥥󔏉Ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.⒈⒈𐫒 ++T; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.1.1.𐫒 ++N; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.1.1.𐫒 ++T; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥Ⴎ.1.1.𐫒 ++N; 쥥󔏉Ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥Ⴎ.1.1.𐫒 ++T; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.1.1.𐫒 ++N; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.1.1.𐫒 ++T; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.1.1.𐫒 ++N; 쥥󔏉ⴎ.\u200C1.1.𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.1.1.𐫒 ++B; xn--5kj3511ccyw3h.1.1.xn--7w9c; [B1 V6]; [B1 V6] ++B; xn--5kj3511ccyw3h.xn--1-rgn.1.xn--7w9c; [B1 C1 V6]; [B1 C1 V6] # 쥥ⴎ.1.1.𐫒 ++B; xn--mnd7865gcy28g.1.1.xn--7w9c; [B1 V6]; [B1 V6] ++B; xn--mnd7865gcy28g.xn--1-rgn.1.xn--7w9c; [B1 C1 V6]; [B1 C1 V6] # 쥥Ⴎ.1.1.𐫒 ++T; 쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 ++N; 
쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 ++T; 쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 ++N; 쥥󔏉ⴎ.\u200C⒈⒈𐫒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # 쥥ⴎ.⒈⒈𐫒 ++B; xn--5kj3511ccyw3h.xn--tsha6797o; [B1 V6]; [B1 V6] ++B; xn--5kj3511ccyw3h.xn--0ug88oa0396u; [B1 C1 V6]; [B1 C1 V6] # 쥥ⴎ.⒈⒈𐫒 ++B; xn--mnd7865gcy28g.xn--tsha6797o; [B1 V6]; [B1 V6] ++B; xn--mnd7865gcy28g.xn--0ug88oa0396u; [B1 C1 V6]; [B1 C1 V6] # 쥥Ⴎ.⒈⒈𐫒 ++B; \u0827𝟶\u06A0-。𑄳; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ࠧ0ڠ-.𑄳 ++B; \u08270\u06A0-。𑄳; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ࠧ0ڠ-.𑄳 ++B; xn--0--p3d67m.xn--v80d; [B1 B3 B6 V3 V5]; [B1 B3 B6 V3 V5] # ࠧ0ڠ-.𑄳 ++T; ς.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # ς.فمي🞛⒈ ++N; ς.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # ς.فمي🞛⒈ ++T; ς.\u0641\u0645\u064A🞛1.; ; xn--4xa.xn--1-gocmu97674d.; NV8 # ς.فمي🞛1. ++N; ς.\u0641\u0645\u064A🞛1.; ; xn--3xa.xn--1-gocmu97674d.; NV8 # ς.فمي🞛1. ++B; Σ.\u0641\u0645\u064A🞛1.; σ.\u0641\u0645\u064A🞛1.; xn--4xa.xn--1-gocmu97674d.; NV8 # σ.فمي🞛1. ++B; σ.\u0641\u0645\u064A🞛1.; ; xn--4xa.xn--1-gocmu97674d.; NV8 # σ.فمي🞛1. ++B; xn--4xa.xn--1-gocmu97674d.; σ.\u0641\u0645\u064A🞛1.; xn--4xa.xn--1-gocmu97674d.; NV8 # σ.فمي🞛1. ++B; xn--3xa.xn--1-gocmu97674d.; ς.\u0641\u0645\u064A🞛1.; xn--3xa.xn--1-gocmu97674d.; NV8 # ς.فمي🞛1. 
++B; Σ.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # σ.فمي🞛⒈ ++B; σ.\uFDC1🞛⒈; [P1 V6]; [P1 V6] # σ.فمي🞛⒈ ++B; xn--4xa.xn--dhbip2802atb20c; [V6]; [V6] # σ.فمي🞛⒈ ++B; xn--3xa.xn--dhbip2802atb20c; [V6]; [V6] # ς.فمي🞛⒈ ++B; 🗩-。𐹻󐞆񥉮; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; 🗩-。𐹻󐞆񥉮; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; xn----6t3s.xn--zo0d4811u6ru6a; [B1 V3 V6]; [B1 V3 V6] ++T; 𐡜-🔪。𝟻\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 P1 V6] # 𐡜-🔪.5 ++N; 𐡜-🔪。𝟻\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # 𐡜-🔪.5 ++T; 𐡜-🔪。5\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 P1 V6] # 𐡜-🔪.5 ++N; 𐡜-🔪。5\u200C𐿀; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # 𐡜-🔪.5 ++B; xn----5j4iv089c.xn--5-bn7i; [B1 B3 V6]; [B1 B3 V6] ++B; xn----5j4iv089c.xn--5-sgn7149h; [B1 B3 C1 V6]; [B1 B3 C1 V6] # 𐡜-🔪.5 ++T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ ++N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ ++T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ ++N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ ++T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ ++N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ ++T; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ß.ߏ0ּ ++N; 𐹣늿\u200Dß.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ ++T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++B; xn--ss-i05i7041a.xn--0-vgc50n; [B1]; [B1] # 𐹣늿ss.ߏ0ּ ++B; xn--ss-l1tu910fo0xd.xn--0-vgc50n; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++B; 
xn--zca770n5s4hev6c.xn--0-vgc50n; [B1 C2]; [B1 C2] # 𐹣늿ß.ߏ0ּ ++T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++T; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200DSS.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++T; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200Dss.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++T; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1] # 𐹣늿ss.ߏ0ּ ++N; 𐹣늿\u200DSs.\u07CF0\u05BC; [B1 C2]; [B1 C2] # 𐹣늿ss.ߏ0ּ ++B; 9󠇥.󪴴ᢓ; [P1 V6]; [P1 V6] ++B; 9󠇥.󪴴ᢓ; [P1 V6]; [P1 V6] ++B; 9.xn--dbf91222q; [V6]; [V6] ++T; \u200C\uFFA0.𐫭🠗ß⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ß玉 ++N; \u200C\uFFA0.𐫭🠗ß⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ß玉 ++T; \u200C\u1160.𐫭🠗ß玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ß玉 ++N; \u200C\u1160.𐫭🠗ß玉; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ß玉 ++T; \u200C\u1160.𐫭🠗SS玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 ++N; \u200C\u1160.𐫭🠗SS玉; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 ++T; \u200C\u1160.𐫭🠗ss玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 ++N; \u200C\u1160.𐫭🠗ss玉; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 ++T; \u200C\u1160.𐫭🠗Ss玉; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 ++N; \u200C\u1160.𐫭🠗Ss玉; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 ++B; xn--psd.xn--ss-je6eq954cp25j; [B2 B3 V6]; [B2 B3 V6] # .𐫭🠗ss玉 ++B; xn--psd526e.xn--ss-je6eq954cp25j; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ss玉 ++B; xn--psd526e.xn--zca2289c550e0iwi; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ß玉 ++T; \u200C\uFFA0.𐫭🠗SS⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 ++N; \u200C\uFFA0.𐫭🠗SS⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 ++T; \u200C\uFFA0.𐫭🠗ss⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 
++N; \u200C\uFFA0.𐫭🠗ss⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 ++T; \u200C\uFFA0.𐫭🠗Ss⽟; [B1 B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # .𐫭🠗ss玉 ++N; \u200C\uFFA0.𐫭🠗Ss⽟; [B1 B2 B3 C1 P1 V6]; [B1 B2 B3 C1 P1 V6] # .𐫭🠗ss玉 ++B; xn--cl7c.xn--ss-je6eq954cp25j; [B2 B3 V6]; [B2 B3 V6] # .𐫭🠗ss玉 ++B; xn--0ug7719f.xn--ss-je6eq954cp25j; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ss玉 ++B; xn--0ug7719f.xn--zca2289c550e0iwi; [B1 B2 B3 C1 V6]; [B1 B2 B3 C1 V6] # .𐫭🠗ß玉 ++T; ︒Ⴖ\u0366.\u200C; [C1 P1 V6]; [P1 V6] # ︒Ⴖͦ. ++N; ︒Ⴖ\u0366.\u200C; [C1 P1 V6]; [C1 P1 V6] # ︒Ⴖͦ. ++T; 。Ⴖ\u0366.\u200C; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # .Ⴖͦ. ++N; 。Ⴖ\u0366.\u200C; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # .Ⴖͦ. ++T; 。ⴖ\u0366.\u200C; [C1 A4_2]; [A4_2] # .ⴖͦ. ++N; 。ⴖ\u0366.\u200C; [C1 A4_2]; [C1 A4_2] # .ⴖͦ. ++B; .xn--hva754s.; [A4_2]; [A4_2] # .ⴖͦ. ++B; .xn--hva754s.xn--0ug; [C1 A4_2]; [C1 A4_2] # .ⴖͦ. ++B; .xn--hva929d.; [V6 A4_2]; [V6 A4_2] # .Ⴖͦ. ++B; .xn--hva929d.xn--0ug; [C1 V6 A4_2]; [C1 V6 A4_2] # .Ⴖͦ. ++T; ︒ⴖ\u0366.\u200C; [C1 P1 V6]; [P1 V6] # ︒ⴖͦ. ++N; ︒ⴖ\u0366.\u200C; [C1 P1 V6]; [C1 P1 V6] # ︒ⴖͦ. ++B; xn--hva754sy94k.; [V6]; [V6] # ︒ⴖͦ. ++B; xn--hva754sy94k.xn--0ug; [C1 V6]; [C1 V6] # ︒ⴖͦ. ++B; xn--hva929dl29p.; [V6]; [V6] # ︒Ⴖͦ. ++B; xn--hva929dl29p.xn--0ug; [C1 V6]; [C1 V6] # ︒Ⴖͦ. ++B; xn--hva754s.; ⴖ\u0366.; xn--hva754s. # ⴖͦ. ++B; ⴖ\u0366.; ; xn--hva754s. # ⴖͦ. ++B; Ⴖ\u0366.; [P1 V6]; [P1 V6] # Ⴖͦ. ++B; xn--hva929d.; [V6]; [V6] # Ⴖͦ. 
++T; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [P1 V6] # ࢻ.Ⴃ𞀒 ++N; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ࢻ.Ⴃ𞀒 ++T; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [P1 V6] # ࢻ.Ⴃ𞀒 ++N; \u08BB.\u200CႣ𞀒; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ࢻ.Ⴃ𞀒 ++T; \u08BB.\u200Cⴃ𞀒; [B1 C1]; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 ++N; \u08BB.\u200Cⴃ𞀒; [B1 C1]; [B1 C1] # ࢻ.ⴃ𞀒 ++B; xn--hzb.xn--ukj4430l; \u08BB.ⴃ𞀒; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 ++B; \u08BB.ⴃ𞀒; ; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 ++B; \u08BB.Ⴃ𞀒; [P1 V6]; [P1 V6] # ࢻ.Ⴃ𞀒 ++B; xn--hzb.xn--bnd2938u; [V6]; [V6] # ࢻ.Ⴃ𞀒 ++B; xn--hzb.xn--0ug822cp045a; [B1 C1]; [B1 C1] # ࢻ.ⴃ𞀒 ++B; xn--hzb.xn--bnd300f7225a; [B1 C1 V6]; [B1 C1 V6] # ࢻ.Ⴃ𞀒 ++T; \u08BB.\u200Cⴃ𞀒; [B1 C1]; xn--hzb.xn--ukj4430l # ࢻ.ⴃ𞀒 ++N; \u08BB.\u200Cⴃ𞀒; [B1 C1]; [B1 C1] # ࢻ.ⴃ𞀒 ++T; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [P1 V6 A4_2] # .2䫷 ++N; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .2䫷 ++T; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [P1 V6 A4_2] # .2䫷 ++N; \u200D\u200C。2䫷󠧷; [C1 C2 P1 V6]; [C1 C2 P1 V6] # .2䫷 ++B; .xn--2-me5ay1273i; [V6 A4_2]; [V6 A4_2] ++B; xn--0ugb.xn--2-me5ay1273i; [C1 C2 V6]; [C1 C2 V6] # .2䫷 ++B; -𞀤󜠐。򈬖; [P1 V3 V6]; [P1 V3 V6] ++B; xn----rq4re4997d.xn--l707b; [V3 V6]; [V3 V6] ++T; 󳛂︒\u200C㟀.\u0624⒈; [C1 P1 V6]; [P1 V6] # ︒㟀.ؤ⒈ ++N; 󳛂︒\u200C㟀.\u0624⒈; [C1 P1 V6]; [C1 P1 V6] # ︒㟀.ؤ⒈ ++T; 󳛂︒\u200C㟀.\u0648\u0654⒈; [C1 P1 V6]; [P1 V6] # ︒㟀.ؤ⒈ ++N; 󳛂︒\u200C㟀.\u0648\u0654⒈; [C1 P1 V6]; [C1 P1 V6] # ︒㟀.ؤ⒈ ++T; 󳛂。\u200C㟀.\u06241.; [B1 C1 P1 V6]; [P1 V6] # .㟀.ؤ1. ++N; 󳛂。\u200C㟀.\u06241.; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .㟀.ؤ1. ++T; 󳛂。\u200C㟀.\u0648\u06541.; [B1 C1 P1 V6]; [P1 V6] # .㟀.ؤ1. ++N; 󳛂。\u200C㟀.\u0648\u06541.; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .㟀.ؤ1. ++B; xn--z272f.xn--etl.xn--1-smc.; [V6]; [V6] # .㟀.ؤ1. ++B; xn--z272f.xn--0ug754g.xn--1-smc.; [B1 C1 V6]; [B1 C1 V6] # .㟀.ؤ1. 
++B; xn--etlt457ccrq7h.xn--jgb476m; [V6]; [V6] # ︒㟀.ؤ⒈ ++B; xn--0ug754gxl4ldlt0k.xn--jgb476m; [C1 V6]; [C1 V6] # ︒㟀.ؤ⒈ ++T; 𑲜\u07CA𝅼。-\u200D; [B1 C2 V3 V5]; [B1 V3 V5] # 𑲜ߊ𝅼.- ++N; 𑲜\u07CA𝅼。-\u200D; [B1 C2 V3 V5]; [B1 C2 V3 V5] # 𑲜ߊ𝅼.- ++B; xn--lsb5482l7nre.-; [B1 V3 V5]; [B1 V3 V5] # 𑲜ߊ𝅼.- ++B; xn--lsb5482l7nre.xn----ugn; [B1 C2 V3 V5]; [B1 C2 V3 V5] # 𑲜ߊ𝅼.- ++T; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 ++N; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 ++T; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 ++N; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 ++T; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 ++N; \u200C.Ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 ++T; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .Ⴉ≠𐫶 ++N; \u200C.Ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .Ⴉ≠𐫶 ++T; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 ++N; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 ++T; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 ++N; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 ++B; .xn--1chx23bzj4p; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] ++B; xn--0ug.xn--1chx23bzj4p; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .ⴉ≠𐫶 ++B; .xn--hnd481gv73o; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] ++B; xn--0ug.xn--hnd481gv73o; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .Ⴉ≠𐫶 ++T; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 ++N; \u200C.ⴉ=\u0338𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 ++T; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ⴉ≠𐫶 ++N; \u200C.ⴉ≠𐫶; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ⴉ≠𐫶 ++T; \u0750。≯ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς ++N; \u0750。≯ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς ++T; \u0750。>\u0338ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς ++N; \u0750。>\u0338ς; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯ς ++B; \u0750。>\u0338Σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ ++B; \u0750。≯Σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ ++B; 
\u0750。≯σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ ++B; \u0750。>\u0338σ; [B1 P1 V6]; [B1 P1 V6] # ݐ.≯σ ++B; xn--3ob.xn--4xa718m; [B1 V6]; [B1 V6] # ݐ.≯σ ++B; xn--3ob.xn--3xa918m; [B1 V6]; [B1 V6] # ݐ.≯ς ++B; \u07FC𐸆.𓖏︒񊨩Ⴐ; [P1 V6]; [P1 V6] # .︒Ⴐ ++B; \u07FC𐸆.𓖏。񊨩Ⴐ; [P1 V6]; [P1 V6] # ..Ⴐ ++B; \u07FC𐸆.𓖏。񊨩ⴐ; [P1 V6]; [P1 V6] # ..ⴐ ++B; xn--0tb8725k.xn--tu8d.xn--7kj73887a; [V6]; [V6] # ..ⴐ ++B; xn--0tb8725k.xn--tu8d.xn--ond97931d; [V6]; [V6] # ..Ⴐ ++B; \u07FC𐸆.𓖏︒񊨩ⴐ; [P1 V6]; [P1 V6] # .︒ⴐ ++B; xn--0tb8725k.xn--7kj9008dt18a7py9c; [V6]; [V6] # .︒ⴐ ++B; xn--0tb8725k.xn--ond3562jt18a7py9c; [V6]; [V6] # .︒Ⴐ ++B; Ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] ++B; Ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] ++B; ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] ++B; xn--vfh16m67gx1162b.xn--ro1d; [V5 V6]; [V5 V6] ++B; xn--9nd623g4zc5z060c.xn--ro1d; [V5 V6]; [V5 V6] ++B; ⴥ⚭󠖫⋃。𑌼; [P1 V5 V6]; [P1 V5 V6] ++B; 🄈。󠷳\u0844; [B1 P1 V6]; [B1 P1 V6] # 🄈.ࡄ ++B; 7,。󠷳\u0844; [B1 P1 V6]; [B1 P1 V6] # 7,.ࡄ ++B; 7,.xn--2vb13094p; [B1 P1 V6]; [B1 P1 V6] # 7,.ࡄ ++B; xn--107h.xn--2vb13094p; [B1 V6]; [B1 V6] # 🄈.ࡄ ++T; ≮\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß ++N; ≮\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß ++T; <\u0338\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß ++N; <\u0338\u0846。섖쮖ß; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ß ++B; <\u0338\u0846。섖쮖SS; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss ++B; ≮\u0846。섖쮖SS; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss ++B; ≮\u0846。섖쮖ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss ++B; <\u0338\u0846。섖쮖ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss ++B; <\u0338\u0846。섖쮖Ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss ++B; ≮\u0846。섖쮖Ss; [B1 P1 V6]; [B1 P1 V6] # ≮ࡆ.섖쮖ss ++B; xn--4vb505k.xn--ss-5z4j006a; [B1 V6]; [B1 V6] # ≮ࡆ.섖쮖ss ++B; xn--4vb505k.xn--zca7259goug; [B1 V6]; [B1 V6] # ≮ࡆ.섖쮖ß ++B; 󠆓⛏-。ꡒ; [V3]; [V3] ++B; xn----o9p.xn--rc9a; [V3]; [V3] ++T; \u07BB𐹳\u0626𑁆。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐹳ئ𑁆.ࢧڰᢒ ++N; \u07BB𐹳\u0626𑁆。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐹳ئ𑁆.ࢧڰᢒ ++T; \u07BB𐹳\u064A𑁆\u0654。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] 
# 𐹳ئ𑁆.ࢧڰᢒ ++N; \u07BB𐹳\u064A𑁆\u0654。\u08A7\u06B0\u200Cᢒ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐹳ئ𑁆.ࢧڰᢒ ++B; xn--lgb32f2753cosb.xn--jkb91hlz1a; [B2 B3 V6]; [B2 B3 V6] # 𐹳ئ𑁆.ࢧڰᢒ ++B; xn--lgb32f2753cosb.xn--jkb91hlz1azih; [B2 B3 V6]; [B2 B3 V6] # 𐹳ئ𑁆.ࢧڰᢒ ++B; \u0816.𐨕𚚕; [B1 B2 B3 B6 P1 V5 V6]; [B1 B2 B3 B6 P1 V5 V6] # ࠖ.𐨕 ++B; xn--rub.xn--tr9c248x; [B1 B2 B3 B6 V5 V6]; [B1 B2 B3 B6 V5 V6] # ࠖ.𐨕 ++B; --。𽊆\u0767𐽋𞠬; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # --.ݧ𞠬 ++B; --.xn--rpb6226k77pfh58p; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # --.ݧ𞠬 ++B; 򛭦𐋥𹸐.≯\u08B0\u08A6󔛣; [B1 P1 V6]; [B1 P1 V6] # 𐋥.≯ࢰࢦ ++B; 򛭦𐋥𹸐.>\u0338\u08B0\u08A6󔛣; [B1 P1 V6]; [B1 P1 V6] # 𐋥.≯ࢰࢦ ++B; xn--887c2298i5mv6a.xn--vybt688qm8981a; [B1 V6]; [B1 V6] # 𐋥.≯ࢰࢦ ++B; 䔛󠇒򤸞𐹧.-䤷; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] ++B; 䔛󠇒򤸞𐹧.-䤷; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] ++B; xn--2loy662coo60e.xn----0n4a; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] ++T; 𐹩.\u200D-; [B1 C2 V3]; [B1 V3] # 𐹩.- ++N; 𐹩.\u200D-; [B1 C2 V3]; [B1 C2 V3] # 𐹩.- ++T; 𐹩.\u200D-; [B1 C2 V3]; [B1 V3] # 𐹩.- ++N; 𐹩.\u200D-; [B1 C2 V3]; [B1 C2 V3] # 𐹩.- ++B; xn--ho0d.-; [B1 V3]; [B1 V3] ++B; xn--ho0d.xn----tgn; [B1 C2 V3]; [B1 C2 V3] # 𐹩.- ++B; 񂈦帷。≯萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- ++B; 񂈦帷。>\u0338萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- ++B; 񂈦帷。≯萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- ++B; 񂈦帷。>\u0338萺\u1DC8-; [P1 V3 V6]; [P1 V3 V6] # 帷.≯萺᷈- ++B; xn--qutw175s.xn----mimu6tf67j; [V3 V6]; [V3 V6] # 帷.≯萺᷈- ++T; \u200D攌\uABED。ᢖ-Ⴘ; [C2 P1 V6]; [P1 V6] # 攌꯭.ᢖ-Ⴘ ++N; \u200D攌\uABED。ᢖ-Ⴘ; [C2 P1 V6]; [C2 P1 V6] # 攌꯭.ᢖ-Ⴘ ++T; \u200D攌\uABED。ᢖ-ⴘ; [C2]; xn--p9ut19m.xn----mck373i # 攌꯭.ᢖ-ⴘ ++N; \u200D攌\uABED。ᢖ-ⴘ; [C2]; [C2] # 攌꯭.ᢖ-ⴘ ++B; xn--p9ut19m.xn----mck373i; 攌\uABED.ᢖ-ⴘ; xn--p9ut19m.xn----mck373i # 攌꯭.ᢖ-ⴘ ++B; 攌\uABED.ᢖ-ⴘ; ; xn--p9ut19m.xn----mck373i # 攌꯭.ᢖ-ⴘ ++B; 攌\uABED.ᢖ-Ⴘ; [P1 V6]; [P1 V6] # 攌꯭.ᢖ-Ⴘ ++B; xn--p9ut19m.xn----k1g451d; [V6]; [V6] # 攌꯭.ᢖ-Ⴘ ++B; xn--1ug592ykp6b.xn----mck373i; [C2]; [C2] # 攌꯭.ᢖ-ⴘ ++B; xn--1ug592ykp6b.xn----k1g451d; [C2 V6]; [C2 V6] # 
攌꯭.ᢖ-Ⴘ ++T; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [P1 V6] # ꖨ.⒗3툒۳ ++N; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [C1 P1 V6] # ꖨ.⒗3툒۳ ++T; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [P1 V6] # ꖨ.⒗3툒۳ ++N; \u200Cꖨ.⒗3툒۳; [C1 P1 V6]; [C1 P1 V6] # ꖨ.⒗3툒۳ ++T; \u200Cꖨ.16.3툒۳; [C1]; xn--9r8a.16.xn--3-nyc0117m # ꖨ.16.3툒۳ ++N; \u200Cꖨ.16.3툒۳; [C1]; [C1] # ꖨ.16.3툒۳ ++T; \u200Cꖨ.16.3툒۳; [C1]; xn--9r8a.16.xn--3-nyc0117m # ꖨ.16.3툒۳ ++N; \u200Cꖨ.16.3툒۳; [C1]; [C1] # ꖨ.16.3툒۳ ++B; xn--9r8a.16.xn--3-nyc0117m; ꖨ.16.3툒۳; xn--9r8a.16.xn--3-nyc0117m ++B; ꖨ.16.3툒۳; ; xn--9r8a.16.xn--3-nyc0117m ++B; ꖨ.16.3툒۳; ꖨ.16.3툒۳; xn--9r8a.16.xn--3-nyc0117m ++B; xn--0ug2473c.16.xn--3-nyc0117m; [C1]; [C1] # ꖨ.16.3툒۳ ++B; xn--9r8a.xn--3-nyc678tu07m; [V6]; [V6] ++B; xn--0ug2473c.xn--3-nyc678tu07m; [C1 V6]; [C1 V6] # ꖨ.⒗3툒۳ ++B; ⒈걾6.𐱁\u06D0; [B1 P1 V6]; [B1 P1 V6] # ⒈걾6.𐱁ې ++B; ⒈걾6.𐱁\u06D0; [B1 P1 V6]; [B1 P1 V6] # ⒈걾6.𐱁ې ++B; 1.걾6.𐱁\u06D0; [B1]; [B1] # 1.걾6.𐱁ې ++B; 1.걾6.𐱁\u06D0; [B1]; [B1] # 1.걾6.𐱁ې ++B; 1.xn--6-945e.xn--glb1794k; [B1]; [B1] # 1.걾6.𐱁ې ++B; xn--6-dcps419c.xn--glb1794k; [B1 V6]; [B1 V6] # ⒈걾6.𐱁ې ++B; 𐲞𝟶≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع ++B; 𐲞𝟶<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع ++B; 𐲞0≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع ++B; 𐲞0<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع ++B; 𐳞0<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع ++B; 𐳞0≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع ++B; xn--0-ngoa5711v.xn--4gb31034p; [B1 B3 V6]; [B1 B3 V6] # 𐳞0≮≮.ع ++B; 𐳞𝟶<\u0338<\u0338.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع ++B; 𐳞𝟶≮≮.󠀧\u0639; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 𐳞0≮≮.ع ++B; \u0AE3.𐹺\u115F; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ૣ.𐹺 ++B; xn--8fc.xn--osd3070k; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ૣ.𐹺 ++T; 𝟏𝨙⸖.\u200D; [C2]; xn--1-5bt6845n. # 1𝨙⸖. ++N; 𝟏𝨙⸖.\u200D; [C2]; [C2] # 1𝨙⸖. ++T; 1𝨙⸖.\u200D; [C2]; xn--1-5bt6845n. # 1𝨙⸖. ++N; 1𝨙⸖.\u200D; [C2]; [C2] # 1𝨙⸖. 
++B; xn--1-5bt6845n.; 1𝨙⸖.; xn--1-5bt6845n.; NV8 ++B; 1𝨙⸖.; ; xn--1-5bt6845n.; NV8 ++B; xn--1-5bt6845n.xn--1ug; [C2]; [C2] # 1𝨙⸖. ++T; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++N; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++T; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++N; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++T; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++N; 𞤐≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++T; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++N; 𞤐=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++T; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++N; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++T; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++N; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++B; xn--wnb859grzfzw60c.xn----kcd; [B1 V3 V6]; [B1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++B; xn--wnb859grzfzw60c.xn----kcd017p; [B1 C1 V3 V6]; [B1 C1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++T; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++N; 𞤲=\u0338\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++T; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++N; 𞤲≠\u0726\u1A60。-\u200C\u07D5; [B1 C1 P1 V3 V6]; [B1 C1 P1 V3 V6] # 𞤲≠ܦ᩠.-ߕ ++B; 𐹰\u0368-ꡧ。\u0675; [B1]; [B1] # 𐹰ͨ-ꡧ.اٴ ++B; 𐹰\u0368-ꡧ。\u0627\u0674; [B1]; [B1] # 𐹰ͨ-ꡧ.اٴ ++B; xn----shb2387jgkqd.xn--mgb8m; [B1]; [B1] # 𐹰ͨ-ꡧ.اٴ ++B; F󠅟。򏗅♚; [P1 V6]; [P1 V6] ++B; F󠅟。򏗅♚; [P1 V6]; [P1 V6] ++B; f󠅟。򏗅♚; [P1 V6]; [P1 V6] ++B; f.xn--45hz6953f; [V6]; [V6] ++B; f󠅟。򏗅♚; [P1 V6]; [P1 V6] ++B; \u0B4D𑄴\u1DE9。𝟮Ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2Ⴘ𞀨 ++B; \u0B4D𑄴\u1DE9。2Ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2Ⴘ𞀨 
++B; \u0B4D𑄴\u1DE9。2ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2ⴘ𞀨 ++B; xn--9ic246gs21p.xn--2-nws2918ndrjr; [V5 V6]; [V5 V6] # ୍𑄴ᷩ.2ⴘ𞀨 ++B; xn--9ic246gs21p.xn--2-k1g43076adrwq; [V5 V6]; [V5 V6] # ୍𑄴ᷩ.2Ⴘ𞀨 ++B; \u0B4D𑄴\u1DE9。𝟮ⴘ𞀨񃥇; [P1 V5 V6]; [P1 V5 V6] # ୍𑄴ᷩ.2ⴘ𞀨 ++T; 򓠭\u200C\u200C⒈。勉𑁅; [C1 P1 V6]; [P1 V6] # ⒈.勉𑁅 ++N; 򓠭\u200C\u200C⒈。勉𑁅; [C1 P1 V6]; [C1 P1 V6] # ⒈.勉𑁅 ++T; 򓠭\u200C\u200C1.。勉𑁅; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # 1..勉𑁅 ++N; 򓠭\u200C\u200C1.。勉𑁅; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # 1..勉𑁅 ++B; xn--1-yi00h..xn--4grs325b; [V6 A4_2]; [V6 A4_2] ++B; xn--1-rgna61159u..xn--4grs325b; [C1 V6 A4_2]; [C1 V6 A4_2] # 1..勉𑁅 ++B; xn--tsh11906f.xn--4grs325b; [V6]; [V6] ++B; xn--0uga855aez302a.xn--4grs325b; [C1 V6]; [C1 V6] # ⒈.勉𑁅 ++B; ᡃ.玿񫈜󕞐; [P1 V6]; [P1 V6] ++B; xn--27e.xn--7cy81125a0yq4a; [V6]; [V6] ++T; \u200C\u200C。⒈≯𝟵; [C1 P1 V6]; [P1 V6 A4_2] # .⒈≯9 ++N; \u200C\u200C。⒈≯𝟵; [C1 P1 V6]; [C1 P1 V6] # .⒈≯9 ++T; \u200C\u200C。⒈>\u0338𝟵; [C1 P1 V6]; [P1 V6 A4_2] # .⒈≯9 ++N; \u200C\u200C。⒈>\u0338𝟵; [C1 P1 V6]; [C1 P1 V6] # .⒈≯9 ++T; \u200C\u200C。1.≯9; [C1 P1 V6]; [P1 V6 A4_2] # .1.≯9 ++N; \u200C\u200C。1.≯9; [C1 P1 V6]; [C1 P1 V6] # .1.≯9 ++T; \u200C\u200C。1.>\u03389; [C1 P1 V6]; [P1 V6 A4_2] # .1.≯9 ++N; \u200C\u200C。1.>\u03389; [C1 P1 V6]; [C1 P1 V6] # .1.≯9 ++B; .1.xn--9-ogo; [V6 A4_2]; [V6 A4_2] ++B; xn--0uga.1.xn--9-ogo; [C1 V6]; [C1 V6] # .1.≯9 ++B; .xn--9-ogo37g; [V6 A4_2]; [V6 A4_2] ++B; xn--0uga.xn--9-ogo37g; [C1 V6]; [C1 V6] # .⒈≯9 ++B; \u115F\u1DE0򐀁.𺻆≯𐮁; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᷠ.≯𐮁 ++B; \u115F\u1DE0򐀁.𺻆>\u0338𐮁; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ᷠ.≯𐮁 ++B; xn--osd615d5659o.xn--hdh5192gkm6r; [B5 B6 V6]; [B5 B6 V6] # ᷠ.≯𐮁 ++T; 󠄫𝩤\u200D\u063E.𝩩-\u081E󑼩; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # 𝩤ؾ.𝩩-ࠞ ++N; 󠄫𝩤\u200D\u063E.𝩩-\u081E󑼩; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # 𝩤ؾ.𝩩-ࠞ ++B; xn--9gb5080v.xn----qgd52296avol4f; [B1 V5 V6]; [B1 V5 V6] # 𝩤ؾ.𝩩-ࠞ ++B; xn--9gb723kg862a.xn----qgd52296avol4f; [B1 C2 V5 V6]; [B1 C2 V5 V6] # 𝩤ؾ.𝩩-ࠞ ++B; \u20DA.𑘿-; [V3 V5]; [V3 V5] # ⃚.𑘿- ++B; \u20DA.𑘿-; [V3 
V5]; [V3 V5] # ⃚.𑘿- ++B; xn--w0g.xn----bd0j; [V3 V5]; [V3 V5] # ⃚.𑘿- ++T; 䮸ß.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ß.紙ࢨ ++N; 䮸ß.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ß.紙ࢨ ++B; 䮸SS.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ss.紙ࢨ ++B; 䮸ss.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ss.紙ࢨ ++B; 䮸Ss.󠵟󠭎紙\u08A8; [B1 P1 V6]; [B1 P1 V6] # 䮸ss.紙ࢨ ++B; xn--ss-sf1c.xn--xyb1370div70kpzba; [B1 V6]; [B1 V6] # 䮸ss.紙ࢨ ++B; xn--zca5349a.xn--xyb1370div70kpzba; [B1 V6]; [B1 V6] # 䮸ß.紙ࢨ ++B; -Ⴞ.-𝩨⅔𐦕; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; -Ⴞ.-𝩨2⁄3𐦕; [B1 P1 V3 V6]; [B1 P1 V3 V6] ++B; -ⴞ.-𝩨2⁄3𐦕; [B1 V3]; [B1 V3] ++B; xn----zws.xn---23-pt0a0433lk3jj; [B1 V3]; [B1 V3] ++B; xn----w1g.xn---23-pt0a0433lk3jj; [B1 V3 V6]; [B1 V3 V6] ++B; -ⴞ.-𝩨⅔𐦕; [B1 V3]; [B1 V3] ++B; 󧈯𐹯\u0AC2。򖢨𐮁񇼖ᡂ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 𐹯ૂ.𐮁ᡂ ++B; 󧈯𐹯\u0AC2。򖢨𐮁񇼖ᡂ; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 𐹯ૂ.𐮁ᡂ ++B; xn--bfc7604kv8m3g.xn--17e5565jl7zw4h16a; [B5 B6 V6]; [B5 B6 V6] # 𐹯ૂ.𐮁ᡂ ++T; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [P1 V5 V6] # ႂ-꣪.ꡊ ++N; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ႂ-꣪.ꡊ ++T; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [P1 V5 V6] # ႂ-꣪.ꡊ ++N; \u1082-\u200D\uA8EA.ꡊ\u200D񼸳; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ႂ-꣪.ꡊ ++B; xn----gyg3618i.xn--jc9ao4185a; [V5 V6]; [V5 V6] # ႂ-꣪.ꡊ ++B; xn----gyg250jio7k.xn--1ug8774cri56d; [C2 V5 V6]; [C2 V5 V6] # ႂ-꣪.ꡊ ++B; ۱。≠\u0668; [B1 P1 V6]; [B1 P1 V6] # ۱.≠٨ ++B; ۱。=\u0338\u0668; [B1 P1 V6]; [B1 P1 V6] # ۱.≠٨ ++B; xn--emb.xn--hib334l; [B1 V6]; [B1 V6] # ۱.≠٨ ++B; 𑈵廊.𐠍; [V5]; [V5] ++B; xn--xytw701b.xn--yc9c; [V5]; [V5] ++T; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ͖-.-Ⴐ١ ++N; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # ͖-.-Ⴐ١ ++T; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 P1 V3 V5 V6] # ͖-.-Ⴐ١ ++N; \u200D\u0356-.-Ⴐ\u0661; [B1 C2 P1 V3 V6]; [B1 C2 P1 V3 V6] # ͖-.-Ⴐ١ ++T; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 V3 V5] # ͖-.-ⴐ١ ++N; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 C2 V3] # ͖-.-ⴐ١ ++B; xn----rgb.xn----bqc2280a; [B1 V3 V5]; 
[B1 V3 V5] # ͖-.-ⴐ١ ++B; xn----rgb661t.xn----bqc2280a; [B1 C2 V3]; [B1 C2 V3] # ͖-.-ⴐ١ ++B; xn----rgb.xn----bqc030f; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ͖-.-Ⴐ١ ++B; xn----rgb661t.xn----bqc030f; [B1 C2 V3 V6]; [B1 C2 V3 V6] # ͖-.-Ⴐ١ ++T; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 V3 V5] # ͖-.-ⴐ١ ++N; \u200D\u0356-.-ⴐ\u0661; [B1 C2 V3]; [B1 C2 V3] # ͖-.-ⴐ١ ++B; \u063A\u0661挏󾯐.-; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # غ١挏.- ++B; xn--5gb2f4205aqi47p.-; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # غ١挏.- ++B; \u06EF。𐹧𞤽; [B1]; [B1] # ۯ.𐹧𞤽 ++B; \u06EF。𐹧𞤽; [B1]; [B1] # ۯ.𐹧𞤽 ++B; \u06EF。𐹧𞤛; [B1]; [B1] # ۯ.𐹧𞤽 ++B; xn--cmb.xn--fo0dy848a; [B1]; [B1] # ۯ.𐹧𞤽 ++B; \u06EF。𐹧𞤛; [B1]; [B1] # ۯ.𐹧𞤽 ++B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] ++B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] ++B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] ++B; Ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] ++B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] ++B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] ++B; xn--mlj0486jgl2j.xn--hbf6853f; [V6]; [V6] ++B; xn--2nd8876sgl2j.xn--hbf6853f; [V6]; [V6] ++B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] ++B; ⴞ𶛀𛗻.ᢗ릫; [P1 V6]; [P1 V6] ++T; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ ++N; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ ++T; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ ++N; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ ++T; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ ++N; 󠎃󗭞\u06B7𐹷。≯\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ ++T; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 P1 V6] # ڷ𐹷.≯᷾ ++N; 󠎃󗭞\u06B7𐹷。>\u0338\u200C\u1DFE; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ڷ𐹷.≯᷾ ++B; xn--qkb4516kbi06fg2id.xn--zfg31q; [B1 V6]; [B1 V6] # ڷ𐹷.≯᷾ ++B; xn--qkb4516kbi06fg2id.xn--zfg59fm0c; [B1 C1 V6]; [B1 C1 V6] # ڷ𐹷.≯᷾ ++T; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 P1 V3 V6] # ᛎ.𐹾𐹪- ++N; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 C2 P1 V3 V6] # ᛎ.𐹾𐹪- ++T; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 P1 V3 V6] # ᛎ.𐹾𐹪- ++N; ᛎ󠅍󠐕\u200D。𐹾𐹪𐻝-; [B1 B6 C2 P1 V3 V6]; [B1 B6 C2 P1 V3 V6] # ᛎ.𐹾𐹪- ++B; 
xn--fxe63563p.xn----q26i2bvu; [B1 B6 V3 V6]; [B1 B6 V3 V6] ++B; xn--fxe848bq3411a.xn----q26i2bvu; [B1 B6 C2 V3 V6]; [B1 B6 C2 V3 V6] # ᛎ.𐹾𐹪- ++B; 𐹶.𐫂; [B1]; [B1] ++B; xn--uo0d.xn--rw9c; [B1]; [B1] ++T; ß\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ß်.⒈ ++N; ß\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ß်.⒈ ++T; ß\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ß်.1. ++N; ß\u200D\u103A。1.; [C2]; [C2] # ß်.1. ++T; SS\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ss်.1. ++N; SS\u200D\u103A。1.; [C2]; [C2] # ss်.1. ++T; ss\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ss်.1. ++N; ss\u200D\u103A。1.; [C2]; [C2] # ss်.1. ++T; Ss\u200D\u103A。1.; [C2]; xn--ss-f4j.1. # ss်.1. ++N; Ss\u200D\u103A。1.; [C2]; [C2] # ss်.1. ++B; xn--ss-f4j.1.; ss\u103A.1.; xn--ss-f4j.1. # ss်.1. ++B; ss\u103A.1.; ; xn--ss-f4j.1. # ss်.1. ++B; SS\u103A.1.; ss\u103A.1.; xn--ss-f4j.1. # ss်.1. ++B; Ss\u103A.1.; ss\u103A.1.; xn--ss-f4j.1. # ss်.1. ++B; xn--ss-f4j585j.1.; [C2]; [C2] # ss်.1. ++B; xn--zca679eh2l.1.; [C2]; [C2] # ß်.1. ++T; SS\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ss်.⒈ ++N; SS\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ss်.⒈ ++T; ss\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ss်.⒈ ++N; ss\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ss်.⒈ ++T; Ss\u200D\u103A。⒈; [C2 P1 V6]; [P1 V6] # ss်.⒈ ++N; Ss\u200D\u103A。⒈; [C2 P1 V6]; [C2 P1 V6] # ss်.⒈ ++B; xn--ss-f4j.xn--tsh; [V6]; [V6] # ss်.⒈ ++B; xn--ss-f4j585j.xn--tsh; [C2 V6]; [C2 V6] # ss်.⒈ ++B; xn--zca679eh2l.xn--tsh; [C2 V6]; [C2 V6] # ß်.⒈ ++T; \u0B4D\u200C𙶵𞻘。\u200D; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ୍. ++N; \u0B4D\u200C𙶵𞻘。\u200D; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ୍. ++B; xn--9ic6417rn4xb.; [B1 V5 V6]; [B1 V5 V6] # ୍. ++B; xn--9ic637hz82z32jc.xn--1ug; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ୍. 
++B; 𐮅。\u06BC🁕; [B3]; [B3] # 𐮅.ڼ🁕 ++B; 𐮅。\u06BC🁕; [B3]; [B3] # 𐮅.ڼ🁕 ++B; xn--c29c.xn--vkb8871w; [B3]; [B3] # 𐮅.ڼ🁕 ++T; \u0620\u17D2。𐫔󠀧\u200C𑈵; [B2 B3 C1 P1 V6]; [B2 B3 P1 V6] # ؠ្.𐫔𑈵 ++N; \u0620\u17D2。𐫔󠀧\u200C𑈵; [B2 B3 C1 P1 V6]; [B2 B3 C1 P1 V6] # ؠ្.𐫔𑈵 ++B; xn--fgb471g.xn--9w9c29jw3931a; [B2 B3 V6]; [B2 B3 V6] # ؠ្.𐫔𑈵 ++B; xn--fgb471g.xn--0ug9853g7verp838a; [B2 B3 C1 V6]; [B2 B3 C1 V6] # ؠ្.𐫔𑈵 ++B; 񋉕.𞣕𞤊; [B1 P1 V5 V6]; [B1 P1 V5 V6] ++B; 񋉕.𞣕𞤬; [B1 P1 V5 V6]; [B1 P1 V5 V6] ++B; xn--tf5w.xn--2b6hof; [B1 V5 V6]; [B1 V5 V6] ++T; \u06CC𐨿.ß\u0F84𑍬; \u06CC𐨿.ß\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ß྄𑍬 ++N; \u06CC𐨿.ß\u0F84𑍬; \u06CC𐨿.ß\u0F84𑍬; xn--clb2593k.xn--zca216edt0r # ی𐨿.ß྄𑍬 ++T; \u06CC𐨿.ß\u0F84𑍬; ; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ß྄𑍬 ++N; \u06CC𐨿.ß\u0F84𑍬; ; xn--clb2593k.xn--zca216edt0r # ی𐨿.ß྄𑍬 ++B; \u06CC𐨿.SS\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 ++B; \u06CC𐨿.ss\u0F84𑍬; ; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 ++B; \u06CC𐨿.Ss\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 ++B; xn--clb2593k.xn--ss-toj6092t; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 ++B; xn--clb2593k.xn--zca216edt0r; \u06CC𐨿.ß\u0F84𑍬; xn--clb2593k.xn--zca216edt0r # ی𐨿.ß྄𑍬 ++B; \u06CC𐨿.SS\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 ++B; \u06CC𐨿.ss\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 ++B; \u06CC𐨿.Ss\u0F84𑍬; \u06CC𐨿.ss\u0F84𑍬; xn--clb2593k.xn--ss-toj6092t # ی𐨿.ss྄𑍬 ++T; 𝟠≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. ++N; 𝟠≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. ++T; 𝟠<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. ++N; 𝟠<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. ++T; 8≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. ++N; 8≮\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. ++T; 8<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [P1 V5 V6] # 8≮. ++N; 8<\u0338\u200C。󠅱\u17B4; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 8≮. ++B; xn--8-ngo.xn--z3e; [V5 V6]; [V5 V6] # 8≮. 
++B; xn--8-sgn10i.xn--z3e; [C1 V5 V6]; [C1 V5 V6] # 8≮. ++B; ᢕ≯︒񄂯.Ⴀ; [P1 V6]; [P1 V6] ++B; ᢕ>\u0338︒񄂯.Ⴀ; [P1 V6]; [P1 V6] ++B; ᢕ≯。񄂯.Ⴀ; [P1 V6]; [P1 V6] ++B; ᢕ>\u0338。񄂯.Ⴀ; [P1 V6]; [P1 V6] ++B; ᢕ>\u0338。񄂯.ⴀ; [P1 V6]; [P1 V6] ++B; ᢕ≯。񄂯.ⴀ; [P1 V6]; [P1 V6] ++B; xn--fbf851c.xn--ko1u.xn--rkj; [V6]; [V6] ++B; xn--fbf851c.xn--ko1u.xn--7md; [V6]; [V6] ++B; ᢕ>\u0338︒񄂯.ⴀ; [P1 V6]; [P1 V6] ++B; ᢕ≯︒񄂯.ⴀ; [P1 V6]; [P1 V6] ++B; xn--fbf851cq98poxw1a.xn--rkj; [V6]; [V6] ++B; xn--fbf851cq98poxw1a.xn--7md; [V6]; [V6] ++B; \u0F9F.-\u082A; [V3 V5]; [V3 V5] # ྟ.-ࠪ ++B; \u0F9F.-\u082A; [V3 V5]; [V3 V5] # ྟ.-ࠪ ++B; xn--vfd.xn----fhd; [V3 V5]; [V3 V5] # ྟ.-ࠪ ++B; ᵬ󠆠.핒⒒⒈􈄦; [P1 V6]; [P1 V6] ++B; ᵬ󠆠.핒⒒⒈􈄦; [P1 V6]; [P1 V6] ++B; ᵬ󠆠.핒11.1.􈄦; [P1 V6]; [P1 V6] ++B; ᵬ󠆠.핒11.1.􈄦; [P1 V6]; [P1 V6] ++B; xn--tbg.xn--11-5o7k.1.xn--k469f; [V6]; [V6] ++B; xn--tbg.xn--tsht7586kyts9l; [V6]; [V6] ++T; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ ++N; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ ++T; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ ++N; ς𑓂𐋢.\u0668; [B1]; [B1] # ς𑓂𐋢.٨ ++B; Σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ ++B; σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ ++B; xn--4xa6371khhl.xn--hib; [B1]; [B1] # σ𑓂𐋢.٨ ++B; xn--3xa8371khhl.xn--hib; [B1]; [B1] # ς𑓂𐋢.٨ ++B; Σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ ++B; σ𑓂𐋢.\u0668; [B1]; [B1] # σ𑓂𐋢.٨ ++T; \uA953\u200C𐋻\u200D.\u2DF8𞿄𐹲; [B1 B6 C2 P1 V5 V6]; [B1 P1 V5 V6] # ꥓𐋻.ⷸ𐹲 ++N; \uA953\u200C𐋻\u200D.\u2DF8𞿄𐹲; [B1 B6 C2 P1 V5 V6]; [B1 B6 C2 P1 V5 V6] # ꥓𐋻.ⷸ𐹲 ++B; xn--3j9a531o.xn--urju692efj0f; [B1 V5 V6]; [B1 V5 V6] # ꥓𐋻.ⷸ𐹲 ++B; xn--0ugc8356he76c.xn--urju692efj0f; [B1 B6 C2 V5 V6]; [B1 B6 C2 V5 V6] # ꥓𐋻.ⷸ𐹲 ++B; ⊼。񪧖\u0695; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ⊼.ڕ ++B; xn--ofh.xn--rjb13118f; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ⊼.ڕ ++B; 𐯬񖋔。󜳥; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; xn--949co370q.xn--7g25e; [B2 B3 V6]; [B2 B3 V6] ++T; \u0601𑍧\u07DD。ς򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.ς🀞 ++N; \u0601𑍧\u07DD。ς򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.ς🀞 ++B; \u0601𑍧\u07DD。Σ򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.σ🀞 ++B; 
\u0601𑍧\u07DD。σ򬍘🀞\u17B5; [B1 B6 P1 V6]; [B1 B6 P1 V6] # 𑍧ߝ.σ🀞 ++B; xn--jfb66gt010c.xn--4xa623h9p95ars26d; [B1 B6 V6]; [B1 B6 V6] # 𑍧ߝ.σ🀞 ++B; xn--jfb66gt010c.xn--3xa823h9p95ars26d; [B1 B6 V6]; [B1 B6 V6] # 𑍧ߝ.ς🀞 ++B; -𐳲\u0646󠺐。\uABED𝟥; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 ++B; -𐳲\u0646󠺐。\uABED3; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 ++B; -𐲲\u0646󠺐。\uABED3; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 ++B; xn----roc5482rek10i.xn--3-zw5e; [B1 V3 V5 V6]; [B1 V3 V5 V6] # -𐳲ن.꯭3 ++B; -𐲲\u0646󠺐。\uABED𝟥; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # -𐳲ن.꯭3 ++T; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 ++N; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 ++T; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 ++N; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 ++T; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 ++N; \u200C󠴦。񲨕≮𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 ++T; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 P1 V6] # .≮𐦜 ++N; \u200C󠴦。񲨕<\u0338𐦜; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .≮𐦜 ++B; xn--6v56e.xn--gdhz712gzlr6b; [B1 B5 B6 V6]; [B1 B5 B6 V6] ++B; xn--0ug22251l.xn--gdhz712gzlr6b; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .≮𐦜 ++B; ⒈✌򟬟.𝟡񠱣; [P1 V6]; [P1 V6] ++B; 1.✌򟬟.9񠱣; [P1 V6]; [P1 V6] ++B; 1.xn--7bi44996f.xn--9-o706d; [V6]; [V6] ++B; xn--tsh24g49550b.xn--9-o706d; [V6]; [V6] ++B; 𑆾𞤬𐮆.\u0666\u1DD4; [B1 V5]; [B1 V5] # 𑆾𞤬𐮆.٦ᷔ ++B; 𑆾𞤊𐮆.\u0666\u1DD4; [B1 V5]; [B1 V5] # 𑆾𞤬𐮆.٦ᷔ ++B; xn--d29c79hf98r.xn--fib011j; [B1 V5]; [B1 V5] # 𑆾𞤬𐮆.٦ᷔ ++T; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ ++N; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ ++T; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ ++N; ς.\uA9C0\uA8C4; [V5]; [V5] # ς.꧀꣄ ++B; Σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ ++B; σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ ++B; xn--4xa.xn--0f9ars; [V5]; [V5] # σ.꧀꣄ ++B; xn--3xa.xn--0f9ars; [V5]; [V5] # ς.꧀꣄ ++B; Σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ ++B; σ.\uA9C0\uA8C4; [V5]; [V5] # σ.꧀꣄ ++T; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 
P1 V5 V6] # 𑰶≯𐳐.࡛ ++N; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ ++T; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ ++N; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ ++T; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ ++N; 𑰶\u200C≯𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ ++T; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ ++N; 𑰶\u200C>\u0338𐳐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ ++T; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ ++N; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ ++T; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ ++N; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ ++B; xn--hdhz343g3wj.xn--qwb; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𑰶≯𐳐.࡛ ++B; xn--0ug06g7697ap4ma.xn--qwb; [B1 B3 B6 C1 V5 V6]; [B1 B3 B6 C1 V5 V6] # 𑰶≯𐳐.࡛ ++T; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ ++N; 𑰶\u200C>\u0338𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ ++T; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𑰶≯𐳐.࡛ ++N; 𑰶\u200C≯𐲐.\u085B; [B1 B3 B6 C1 P1 V5 V6]; [B1 B3 B6 C1 P1 V5 V6] # 𑰶≯𐳐.࡛ ++B; 羚。≯; [P1 V6]; [P1 V6] ++B; 羚。>\u0338; [P1 V6]; [P1 V6] ++B; 羚。≯; [P1 V6]; [P1 V6] ++B; 羚。>\u0338; [P1 V6]; [P1 V6] ++B; xn--xt0a.xn--hdh; [V6]; [V6] ++B; 𑓂\u1759.\u08A8; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑓂.ࢨ ++B; 𑓂\u1759.\u08A8; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𑓂.ࢨ ++B; xn--e1e9580k.xn--xyb; [B1 V5 V6]; [B1 V5 V6] # 𑓂.ࢨ ++T; 󨣿󠇀\u200D。\u0663ҠჀ𝟑; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡჀ3 ++N; 󨣿󠇀\u200D。\u0663ҠჀ𝟑; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡჀ3 ++T; 󨣿󠇀\u200D。\u0663ҠჀ3; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡჀ3 ++N; 󨣿󠇀\u200D。\u0663ҠჀ3; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡჀ3 ++T; 󨣿󠇀\u200D。\u0663ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 
P1 V6] # .٣ҡⴠ3 ++N; 󨣿󠇀\u200D。\u0663ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 ++T; 󨣿󠇀\u200D。\u0663Ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡⴠ3 ++N; 󨣿󠇀\u200D。\u0663Ҡⴠ3; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 ++B; xn--1r19e.xn--3-ozb36ko13f; [B1 V6]; [B1 V6] # .٣ҡⴠ3 ++B; xn--1ug89936l.xn--3-ozb36ko13f; [B1 B6 C2 V6]; [B1 B6 C2 V6] # .٣ҡⴠ3 ++B; xn--1r19e.xn--3-ozb36kixu; [B1 V6]; [B1 V6] # .٣ҡჀ3 ++B; xn--1ug89936l.xn--3-ozb36kixu; [B1 B6 C2 V6]; [B1 B6 C2 V6] # .٣ҡჀ3 ++T; 󨣿󠇀\u200D。\u0663ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡⴠ3 ++N; 󨣿󠇀\u200D。\u0663ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 ++T; 󨣿󠇀\u200D。\u0663Ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 P1 V6] # .٣ҡⴠ3 ++N; 󨣿󠇀\u200D。\u0663Ҡⴠ𝟑; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # .٣ҡⴠ3 ++B; ᡷ。𐹢\u08E0; [B1]; [B1] # ᡷ.𐹢࣠ ++B; xn--k9e.xn--j0b5005k; [B1]; [B1] # ᡷ.𐹢࣠ ++T; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß ++N; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß ++T; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß ++N; 򕮇\u1BF3。\u0666񗜼\u17D2ß; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ß ++B; 򕮇\u1BF3。\u0666񗜼\u17D2SS; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss ++B; 򕮇\u1BF3。\u0666񗜼\u17D2ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss ++B; 򕮇\u1BF3。\u0666񗜼\u17D2Ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss ++B; xn--1zf58212h.xn--ss-pyd459o3258m; [B1 V6]; [B1 V6] # ᯳.٦្ss ++B; xn--1zf58212h.xn--zca34zk4qx711k; [B1 V6]; [B1 V6] # ᯳.٦្ß ++B; 򕮇\u1BF3。\u0666񗜼\u17D2SS; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss ++B; 򕮇\u1BF3。\u0666񗜼\u17D2ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss ++B; 򕮇\u1BF3。\u0666񗜼\u17D2Ss; [B1 P1 V6]; [B1 P1 V6] # ᯳.٦្ss ++B; \u0664򤽎𑲛.󠔢︒≠; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛.︒≠ ++B; \u0664򤽎𑲛.󠔢︒=\u0338; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛.︒≠ ++B; \u0664򤽎𑲛.󠔢。≠; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛..≠ ++B; \u0664򤽎𑲛.󠔢。=\u0338; [B1 P1 V6]; [B1 P1 V6] # ٤𑲛..≠ ++B; xn--dib0653l2i02d.xn--k736e.xn--1ch; [B1 V6]; [B1 V6] # ٤𑲛..≠ ++B; xn--dib0653l2i02d.xn--1ch7467f14u4g; [B1 V6]; [B1 V6] # ٤𑲛.︒≠ ++B; ➆񷧕ỗ⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] # ➆ỗ⒈.࡛9 ++B; ➆񷧕o\u0302\u0303⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] 
# ➆ỗ⒈.࡛9 ++B; ➆񷧕ỗ1..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 ++B; ➆񷧕o\u0302\u03031..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 ++B; ➆񷧕O\u0302\u03031..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 ++B; ➆񷧕Ỗ1..򑬒񡘮\u085B9; [P1 V6 A4_2]; [P1 V6 A4_2] # ➆ỗ1..࡛9 ++B; xn--1-3xm292b6044r..xn--9-6jd87310jtcqs; [V6 A4_2]; [V6 A4_2] # ➆ỗ1..࡛9 ++B; ➆񷧕O\u0302\u0303⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] # ➆ỗ⒈.࡛9 ++B; ➆񷧕Ỗ⒈.򑬒񡘮\u085B𝟫; [P1 V6]; [P1 V6] # ➆ỗ⒈.࡛9 ++B; xn--6lg26tvvc6v99z.xn--9-6jd87310jtcqs; [V6]; [V6] # ➆ỗ⒈.࡛9 ++T; \u200D。𞤘; [B1 C2]; [A4_2] # .𞤺 ++N; \u200D。𞤘; [B1 C2]; [B1 C2] # .𞤺 ++T; \u200D。𞤘; [B1 C2]; [A4_2] # .𞤺 ++N; \u200D。𞤘; [B1 C2]; [B1 C2] # .𞤺 ++T; \u200D。𞤺; [B1 C2]; [A4_2] # .𞤺 ++N; \u200D。𞤺; [B1 C2]; [B1 C2] # .𞤺 ++B; .xn--ye6h; [A4_2]; [A4_2] ++B; xn--1ug.xn--ye6h; [B1 C2]; [B1 C2] # .𞤺 ++T; \u200D。𞤺; [B1 C2]; [A4_2] # .𞤺 ++N; \u200D。𞤺; [B1 C2]; [B1 C2] # .𞤺 ++B; xn--ye6h; 𞤺; xn--ye6h ++B; 𞤺; ; xn--ye6h ++B; 𞤘; 𞤺; xn--ye6h ++B; \u0829\u0724.ᢣ; [B1 V5]; [B1 V5] # ࠩܤ.ᢣ ++B; xn--unb53c.xn--tbf; [B1 V5]; [B1 V5] # ࠩܤ.ᢣ ++T; \u073C\u200C-。𓐾ß; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ß ++N; \u073C\u200C-。𓐾ß; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ß ++T; \u073C\u200C-。𓐾SS; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ss ++N; \u073C\u200C-。𓐾SS; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ss ++T; \u073C\u200C-。𓐾ss; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ss ++N; \u073C\u200C-。𓐾ss; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ss ++T; \u073C\u200C-。𓐾Ss; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # ܼ-.ss ++N; \u073C\u200C-。𓐾Ss; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # ܼ-.ss ++B; xn----s2c.xn--ss-066q; [V3 V5 V6]; [V3 V5 V6] # ܼ-.ss ++B; xn----s2c071q.xn--ss-066q; [C1 V3 V5 V6]; [C1 V3 V5 V6] # ܼ-.ss ++B; xn----s2c071q.xn--zca7848m; [C1 V3 V5 V6]; [C1 V3 V5 V6] # ܼ-.ß ++T; \u200Cς🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # ς🃡⒗.ೆ仧ݖ ++N; \u200Cς🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # ς🃡⒗.ೆ仧ݖ ++T; \u200Cς🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B5 B6 V5 
A4_2] # ς🃡16..ೆ仧ݖ ++N; \u200Cς🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # ς🃡16..ೆ仧ݖ ++T; \u200CΣ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B5 B6 V5 A4_2] # σ🃡16..ೆ仧ݖ ++N; \u200CΣ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # σ🃡16..ೆ仧ݖ ++T; \u200Cσ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B5 B6 V5 A4_2] # σ🃡16..ೆ仧ݖ ++N; \u200Cσ🃡16..\u0CC6仧\u0756; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # σ🃡16..ೆ仧ݖ ++B; xn--16-ubc66061c..xn--9ob79ycx2e; [B5 B6 V5 A4_2]; [B5 B6 V5 A4_2] # σ🃡16..ೆ仧ݖ ++B; xn--16-ubc7700avy99b..xn--9ob79ycx2e; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # σ🃡16..ೆ仧ݖ ++B; xn--16-rbc1800avy99b..xn--9ob79ycx2e; [B1 B5 B6 C1 V5 A4_2]; [B1 B5 B6 C1 V5 A4_2] # ς🃡16..ೆ仧ݖ ++T; \u200CΣ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ ++N; \u200CΣ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ ++T; \u200Cσ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B5 B6 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ ++N; \u200Cσ🃡⒗.\u0CC6仧\u0756; [B1 B5 B6 C1 P1 V5 V6]; [B1 B5 B6 C1 P1 V5 V6] # σ🃡⒗.ೆ仧ݖ ++B; xn--4xa229nbu92a.xn--9ob79ycx2e; [B5 B6 V5 V6]; [B5 B6 V5 V6] # σ🃡⒗.ೆ仧ݖ ++B; xn--4xa595lz9czy52d.xn--9ob79ycx2e; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # σ🃡⒗.ೆ仧ݖ ++B; xn--3xa795lz9czy52d.xn--9ob79ycx2e; [B1 B5 B6 C1 V5 V6]; [B1 B5 B6 C1 V5 V6] # ς🃡⒗.ೆ仧ݖ ++B; -.𞸚; [B1 V3]; [B1 V3] # -.ظ ++B; -.\u0638; [B1 V3]; [B1 V3] # -.ظ ++B; -.xn--3gb; [B1 V3]; [B1 V3] # -.ظ ++B; 򏛓\u0683.\u0F7E\u0634; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ڃ.ཾش ++B; xn--8ib92728i.xn--zgb968b; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ڃ.ཾش ++B; \u0FE6\u0843񽶬.𐮏; [B5 P1 V6]; [B5 P1 V6] # ࡃ.𐮏 ++B; xn--1vb320b5m04p.xn--m29c; [B5 V6]; [B5 V6] # ࡃ.𐮏 ++T; 2񎨠\u07CBß。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋß.ᠽ ++N; 2񎨠\u07CBß。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋß.ᠽ ++B; 2񎨠\u07CBSS。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋss.ᠽ ++B; 2񎨠\u07CBss。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋss.ᠽ ++B; 2񎨠\u07CBSs。ᠽ; [B1 P1 V6]; [B1 P1 V6] # 2ߋss.ᠽ ++B; xn--2ss-odg83511n.xn--w7e; [B1 V6]; [B1 V6] # 
2ߋss.ᠽ ++B; xn--2-qfa924cez02l.xn--w7e; [B1 V6]; [B1 V6] # 2ߋß.ᠽ ++T; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- ++N; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- ++T; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- ++N; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- ++T; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- ++N; 㸳\u07CA≮.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- ++T; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێß- ++N; 㸳\u07CA<\u0338.\u06CEß-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێß- ++T; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++T; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++T; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++T; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++T; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++T; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++B; xn--lsb457kkut.xn--ss--qjf; [B2 B3 B5 B6 V3 V6]; [B2 B3 B5 B6 V3 V6] # 㸳ߊ≮.ێss- ++B; 
xn--lsb457kkut.xn--ss--qjf2343a; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # 㸳ߊ≮.ێss- ++B; xn--lsb457kkut.xn----pfa076bys4a; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # 㸳ߊ≮.ێß- ++T; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA<\u0338.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++T; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA≮.\u06CESS-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++T; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA≮.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++T; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA<\u0338.\u06CEss-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++T; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA<\u0338.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++T; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V3 V6] # 㸳ߊ≮.ێss- ++N; 㸳\u07CA≮.\u06CESs-\u200D; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # 㸳ߊ≮.ێss- ++B; -򷝬\u135E𑜧.\u1DEB-︒; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -፞𑜧.ᷫ-︒ ++B; -򷝬\u135E𑜧.\u1DEB-。; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -፞𑜧.ᷫ-. ++B; xn----b5h1837n2ok9f.xn----mkm.; [V3 V5 V6]; [V3 V5 V6] # -፞𑜧.ᷫ-. 
++B; xn----b5h1837n2ok9f.xn----mkmw278h; [V3 V5 V6]; [V3 V5 V6] # -፞𑜧.ᷫ-︒ ++B; ︒.򚠡\u1A59; [P1 V6]; [P1 V6] # ︒.ᩙ ++B; 。.򚠡\u1A59; [P1 V6 A4_2]; [P1 V6 A4_2] # ..ᩙ ++B; ..xn--cof61594i; [V6 A4_2]; [V6 A4_2] # ..ᩙ ++B; xn--y86c.xn--cof61594i; [V6]; [V6] # ︒.ᩙ ++T; \u0323\u2DE1。\u200C⓾\u200C\u06B9; [B1 B3 B6 C1 V5]; [B1 B3 B6 V5] # ̣ⷡ.⓾ڹ ++N; \u0323\u2DE1。\u200C⓾\u200C\u06B9; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ̣ⷡ.⓾ڹ ++B; xn--kta899s.xn--skb116m; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ̣ⷡ.⓾ڹ ++B; xn--kta899s.xn--skb970ka771c; [B1 B3 B6 C1 V5]; [B1 B3 B6 C1 V5] # ̣ⷡ.⓾ڹ ++B; 𞠶ᠴ\u06DD。\u1074𞤵󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 ++B; 𞠶ᠴ\u06DD。\u1074𞤵󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 ++B; 𞠶ᠴ\u06DD。\u1074𞤓󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 ++B; xn--tlb199fwl35a.xn--yld4613v; [B1 B2 V5 V6]; [B1 B2 V5 V6] # 𞠶ᠴ.ၴ𞤵 ++B; 𞠶ᠴ\u06DD。\u1074𞤓󠅦; [B1 B2 P1 V5 V6]; [B1 B2 P1 V5 V6] # 𞠶ᠴ.ၴ𞤵 ++B; 𑰺.-򑟏; [P1 V3 V5 V6]; [P1 V3 V5 V6] ++B; xn--jk3d.xn----iz68g; [V3 V5 V6]; [V3 V5 V6] ++B; 󠻩.赏; [P1 V6]; [P1 V6] ++B; 󠻩.赏; [P1 V6]; [P1 V6] ++B; xn--2856e.xn--6o3a; [V6]; [V6] ++B; \u06B0ᠡ。Ⴁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ڰᠡ.Ⴁ ++B; \u06B0ᠡ。Ⴁ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # ڰᠡ.Ⴁ ++B; \u06B0ᠡ。ⴁ; [B2 B3]; [B2 B3] # ڰᠡ.ⴁ ++B; xn--jkb440g.xn--skj; [B2 B3]; [B2 B3] # ڰᠡ.ⴁ ++B; xn--jkb440g.xn--8md; [B2 B3 V6]; [B2 B3 V6] # ڰᠡ.Ⴁ ++B; \u06B0ᠡ。ⴁ; [B2 B3]; [B2 B3] # ڰᠡ.ⴁ ++T; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- ++N; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- ++T; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- ++N; \u20DEႪ\u06BBς。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻς.- ++T; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- ++N; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- ++B; \u20DEႪ\u06BBΣ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- ++B; \u20DEⴊ\u06BBσ。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻσ.- ++B; \u20DEႪ\u06BBσ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- ++B; xn--4xa33m7zmb0q.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # 
⃞Ⴊڻσ.- ++B; xn--4xa33mr38aeel.-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻσ.- ++B; xn--3xa53mr38aeel.-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- ++B; xn--3xa53m7zmb0q.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ⃞Ⴊڻς.- ++T; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- ++N; \u20DEⴊ\u06BBς。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻς.- ++B; \u20DEႪ\u06BBΣ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- ++B; \u20DEⴊ\u06BBσ。-; [B1 V3 V5]; [B1 V3 V5] # ⃞ⴊڻσ.- ++B; \u20DEႪ\u06BBσ。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ⃞Ⴊڻσ.- ++T; Ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # Ⴍ. ++N; Ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴍ. ++T; Ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # Ⴍ. ++N; Ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # Ⴍ. ++T; ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # ⴍ. ++N; ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴍ. ++B; xn--4kj.xn--p01x; [V6]; [V6] ++B; xn--4kj.xn--0ug56448b; [C1 V6]; [C1 V6] # ⴍ. ++B; xn--lnd.xn--p01x; [V6]; [V6] ++B; xn--lnd.xn--0ug56448b; [C1 V6]; [C1 V6] # Ⴍ. ++T; ⴍ.񍇦\u200C; [C1 P1 V6]; [P1 V6] # ⴍ. ++N; ⴍ.񍇦\u200C; [C1 P1 V6]; [C1 P1 V6] # ⴍ. ++B; 򉟂󠵣.𐫫\u1A60󴺖\u1B44; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # .𐫫᩠᭄ ++B; xn--9u37blu98h.xn--jof13bt568cork1j; [B2 B3 B6 V6]; [B2 B3 B6 V6] # .𐫫᩠᭄ ++B; ≯❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] ++B; >\u0338❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] ++B; ≯❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] ++B; >\u0338❊ᠯ。𐹱⺨; [B1 P1 V6]; [B1 P1 V6] ++B; xn--i7e163ct2d.xn--vwj7372e; [B1 V6]; [B1 V6] ++B; 􁕜𐹧𞭁𐹩。Ⴈ𐫮Ⴏ; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++B; 􁕜𐹧𞭁𐹩。ⴈ𐫮ⴏ; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++B; xn--fo0de1270ope54j.xn--zkjo0151o; [B5 B6 V6]; [B5 B6 V6] ++B; xn--fo0de1270ope54j.xn--gndo2033q; [B5 B6 V6]; [B5 B6 V6] ++B; 𞠂。\uA926; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𞠂.ꤦ ++B; xn--145h.xn--ti9a; [B1 B3 B6 V5]; [B1 B3 B6 V5] # 𞠂.ꤦ ++B; 𝟔𐹫.\u0733\u10379ꡇ; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ ++B; 𝟔𐹫.\u1037\u07339ꡇ; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ ++B; 6𐹫.\u1037\u07339ꡇ; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ ++B; xn--6-t26i.xn--9-91c730e8u8n; [B1 V5]; [B1 V5] # 6𐹫.့ܳ9ꡇ ++B; \u0724\u0603𞲶.\u06D8; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # ܤ.ۘ ++B; \u0724\u0603𞲶.\u06D8; [B1 B3 B6 P1 V5 
V6]; [B1 B3 B6 P1 V5 V6] # ܤ.ۘ ++B; xn--lfb19ct414i.xn--olb; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # ܤ.ۘ ++T; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 P1 V6] # ✆ꡋ.ز ++N; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ✆ꡋ.ز ++T; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 P1 V6] # ✆ꡋ.ز ++N; ✆񱔩ꡋ.\u0632\u200D𞣴; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ✆ꡋ.ز ++B; xn--1biv525bcix0d.xn--xgb6828v; [B1 V6]; [B1 V6] # ✆ꡋ.ز ++B; xn--1biv525bcix0d.xn--xgb253k0m73a; [B1 C2 V6]; [B1 C2 V6] # ✆ꡋ.ز ++B; \u0845񃾰𞸍-.≠򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 ++B; \u0845񃾰𞸍-.=\u0338򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 ++B; \u0845񃾰\u0646-.≠򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 ++B; \u0845񃾰\u0646-.=\u0338򃁟𑋪; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ࡅن-.≠𑋪 ++B; xn----qoc64my971s.xn--1ch7585g76o3c; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ࡅن-.≠𑋪 ++B; 𝟛.笠; 3.笠; 3.xn--6vz ++B; 𝟛.笠; 3.笠; 3.xn--6vz ++B; 3.笠; ; 3.xn--6vz ++B; 3.xn--6vz; 3.笠; 3.xn--6vz ++T; -\u200D.Ⴞ𐋷; [C2 P1 V3 V6]; [P1 V3 V6] # -.Ⴞ𐋷 ++N; -\u200D.Ⴞ𐋷; [C2 P1 V3 V6]; [C2 P1 V3 V6] # -.Ⴞ𐋷 ++T; -\u200D.ⴞ𐋷; [C2 V3]; [V3] # -.ⴞ𐋷 ++N; -\u200D.ⴞ𐋷; [C2 V3]; [C2 V3] # -.ⴞ𐋷 ++B; -.xn--mlj8559d; [V3]; [V3] ++B; xn----ugn.xn--mlj8559d; [C2 V3]; [C2 V3] # -.ⴞ𐋷 ++B; -.xn--2nd2315j; [V3 V6]; [V3 V6] ++B; xn----ugn.xn--2nd2315j; [C2 V3 V6]; [C2 V3 V6] # -.Ⴞ𐋷 ++T; \u200Dςß\u0731.\u0BCD; [C2 V5]; [V5] # ςßܱ.் ++N; \u200Dςß\u0731.\u0BCD; [C2 V5]; [C2 V5] # ςßܱ.் ++T; \u200Dςß\u0731.\u0BCD; [C2 V5]; [V5] # ςßܱ.் ++N; \u200Dςß\u0731.\u0BCD; [C2 V5]; [C2 V5] # ςßܱ.் ++T; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் ++N; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் ++T; \u200Dσss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் ++N; \u200Dσss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் ++T; \u200DΣss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் ++N; \u200DΣss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் ++B; xn--ss-ubc826a.xn--xmc; [V5]; [V5] # σssܱ.் ++B; xn--ss-ubc826ab34b.xn--xmc; [C2 V5]; [C2 V5] # σssܱ.் ++T; \u200DΣß\u0731.\u0BCD; [C2 V5]; [V5] 
# σßܱ.் ++N; \u200DΣß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் ++T; \u200Dσß\u0731.\u0BCD; [C2 V5]; [V5] # σßܱ.் ++N; \u200Dσß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் ++B; xn--zca39lk1di19a.xn--xmc; [C2 V5]; [C2 V5] # σßܱ.் ++B; xn--zca19ln1di19a.xn--xmc; [C2 V5]; [C2 V5] # ςßܱ.் ++T; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் ++N; \u200DΣSS\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் ++T; \u200Dσss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் ++N; \u200Dσss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் ++T; \u200DΣss\u0731.\u0BCD; [C2 V5]; [V5] # σssܱ.் ++N; \u200DΣss\u0731.\u0BCD; [C2 V5]; [C2 V5] # σssܱ.் ++T; \u200DΣß\u0731.\u0BCD; [C2 V5]; [V5] # σßܱ.் ++N; \u200DΣß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் ++T; \u200Dσß\u0731.\u0BCD; [C2 V5]; [V5] # σßܱ.் ++N; \u200Dσß\u0731.\u0BCD; [C2 V5]; [C2 V5] # σßܱ.் ++T; ≠.\u200D; [C2 P1 V6]; [P1 V6] # ≠. ++N; ≠.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. ++T; =\u0338.\u200D; [C2 P1 V6]; [P1 V6] # ≠. ++N; =\u0338.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. ++T; ≠.\u200D; [C2 P1 V6]; [P1 V6] # ≠. ++N; ≠.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. ++T; =\u0338.\u200D; [C2 P1 V6]; [P1 V6] # ≠. ++N; =\u0338.\u200D; [C2 P1 V6]; [C2 P1 V6] # ≠. ++B; xn--1ch.; [V6]; [V6] ++B; xn--1ch.xn--1ug; [C2 V6]; [C2 V6] # ≠. 
++B; \uFC01。\u0C81ᠼ▗򒁋; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ئح.ಁᠼ▗ ++B; \u0626\u062D。\u0C81ᠼ▗򒁋; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ئح.ಁᠼ▗ ++B; \u064A\u0654\u062D。\u0C81ᠼ▗򒁋; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ئح.ಁᠼ▗ ++B; xn--lgbo.xn--2rc021dcxkrx55t; [B1 V5 V6]; [B1 V5 V6] # ئح.ಁᠼ▗ ++T; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 ++N; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 ++T; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 ++N; 󧋵\u09CDς.ς𐨿; [P1 V6]; [P1 V6] # ্ς.ς𐨿 ++B; 󧋵\u09CDΣ.Σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 ++T; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 ++N; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 ++B; 󧋵\u09CDσ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 ++B; 󧋵\u09CDΣ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 ++B; xn--4xa502av8297a.xn--4xa6055k; [V6]; [V6] # ্σ.σ𐨿 ++T; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 ++N; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 ++B; xn--4xa502av8297a.xn--3xa8055k; [V6]; [V6] # ্σ.ς𐨿 ++B; xn--3xa702av8297a.xn--3xa8055k; [V6]; [V6] # ্ς.ς𐨿 ++B; 󧋵\u09CDΣ.Σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 ++T; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 ++N; 󧋵\u09CDσ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 ++B; 󧋵\u09CDσ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 ++B; 󧋵\u09CDΣ.σ𐨿; [P1 V6]; [P1 V6] # ্σ.σ𐨿 ++T; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 ++N; 󧋵\u09CDΣ.ς𐨿; [P1 V6]; [P1 V6] # ্σ.ς𐨿 ++B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰Ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗႹ ++B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰Ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗႹ ++B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗⴙ ++B; xn--zsb09cu46vjs6f.xn--gmf469fr883am5r1e; [B2 B3 V6]; [B2 B3 V6] # 𐫓ߘ牅ࣸ.ᨗⴙ ++B; xn--zsb09cu46vjs6f.xn--xnd909bv540bm5k9d; [B2 B3 V6]; [B2 B3 V6] # 𐫓ߘ牅ࣸ.ᨗႹ ++B; 𐫓\u07D8牅\u08F8。𞦤\u1A17򱍰ⴙ; [B2 B3 P1 V6]; [B2 B3 P1 V6] # 𐫓ߘ牅ࣸ.ᨗⴙ ++B; 񣤒。륧; [P1 V6]; [P1 V6] ++B; 񣤒。륧; [P1 V6]; [P1 V6] ++B; 񣤒。륧; [P1 V6]; [P1 V6] ++B; 񣤒。륧; [P1 V6]; [P1 V6] ++B; xn--s264a.xn--pw2b; [V6]; [V6] ++T; 𐹷\u200D。󉵢; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹷. ++N; 𐹷\u200D。󉵢; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹷. ++B; xn--vo0d.xn--8088d; [B1 V6]; [B1 V6] ++B; xn--1ugx205g.xn--8088d; [B1 C2 V6]; [B1 C2 V6] # 𐹷. 
++B; Ⴘ\u06C2𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- ++B; Ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- ++B; Ⴘ\u06C2𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- ++B; Ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 P1 V3 V6]; [B1 B5 B6 P1 V3 V6] # Ⴘۂ𑲭.- ++B; ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- ++B; ⴘ\u06C2𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- ++B; xn--1kb147qfk3n.-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- ++B; xn--1kb312c139t.-; [B1 B5 B6 V3 V6]; [B1 B5 B6 V3 V6] # Ⴘۂ𑲭.- ++B; ⴘ\u06C1\u0654𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- ++B; ⴘ\u06C2𑲭。-; [B1 B5 B6 V3]; [B1 B5 B6 V3] # ⴘۂ𑲭.- ++B; \uA806\u067B₆ᡐ。🛇\uFCDD; [B1 V5]; [B1 V5] # ꠆ٻ6ᡐ.🛇يم ++B; \uA806\u067B6ᡐ。🛇\u064A\u0645; [B1 V5]; [B1 V5] # ꠆ٻ6ᡐ.🛇يم ++B; xn--6-rrc018krt9k.xn--hhbj61429a; [B1 V5]; [B1 V5] # ꠆ٻ6ᡐ.🛇يم ++B; 򸍂.㇄ᡟ𐫂\u0622; [B1 P1 V6]; [B1 P1 V6] # .㇄ᡟ𐫂آ ++B; 򸍂.㇄ᡟ𐫂\u0627\u0653; [B1 P1 V6]; [B1 P1 V6] # .㇄ᡟ𐫂آ ++B; xn--p292d.xn--hgb154ghrsvm2r; [B1 V6]; [B1 V6] # .㇄ᡟ𐫂آ ++B; \u07DF򵚌。-\u07E9; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ߟ.-ߩ ++B; xn--6sb88139l.xn----pdd; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ߟ.-ߩ ++T; ς\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B5 P1 V6] # ςك襾.ᢟ⒈ ++N; ς\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # ςك襾.ᢟ⒈ ++T; ς\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B5] # ςك襾.ᢟ1. ++N; ς\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B1 B5 C1] # ςك襾.ᢟ1. ++T; Σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B5] # σك襾.ᢟ1. ++N; Σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B1 B5 C1] # σك襾.ᢟ1. ++T; σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B5] # σك襾.ᢟ1. ++N; σ\u0643襾.\u200Cᢟ\u200C1.; [B1 B5 C1]; [B1 B5 C1] # σك襾.ᢟ1. ++B; xn--4xa49jux8r.xn--1-4ck.; [B5]; [B5] # σك襾.ᢟ1. ++B; xn--4xa49jux8r.xn--1-4ck691bba.; [B1 B5 C1]; [B1 B5 C1] # σك襾.ᢟ1. ++B; xn--3xa69jux8r.xn--1-4ck691bba.; [B1 B5 C1]; [B1 B5 C1] # ςك襾.ᢟ1. 
++T; Σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B5 P1 V6] # σك襾.ᢟ⒈ ++N; Σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # σك襾.ᢟ⒈ ++T; σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B5 P1 V6] # σك襾.ᢟ⒈ ++N; σ\u0643⾑.\u200Cᢟ\u200C⒈; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # σك襾.ᢟ⒈ ++B; xn--4xa49jux8r.xn--pbf212d; [B5 V6]; [B5 V6] # σك襾.ᢟ⒈ ++B; xn--4xa49jux8r.xn--pbf519aba607b; [B1 B5 C1 V6]; [B1 B5 C1 V6] # σك襾.ᢟ⒈ ++B; xn--3xa69jux8r.xn--pbf519aba607b; [B1 B5 C1 V6]; [B1 B5 C1 V6] # ςك襾.ᢟ⒈ ++B; ᡆ𑓝.𞵆; [P1 V6]; [P1 V6] ++B; ᡆ𑓝.𞵆; [P1 V6]; [P1 V6] ++B; xn--57e0440k.xn--k86h; [V6]; [V6] ++T; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ੍𦍓ᷮ.ࢽ ++N; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ੍𦍓ᷮ.ࢽ ++T; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ੍𦍓ᷮ.ࢽ ++N; \u0A4D𦍓\u1DEE。\u200C\u08BD񝹲; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ੍𦍓ᷮ.ࢽ ++B; xn--ybc461hph93b.xn--jzb29857e; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ੍𦍓ᷮ.ࢽ ++B; xn--ybc461hph93b.xn--jzb740j1y45h; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ੍𦍓ᷮ.ࢽ ++T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 ++N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 ++T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 ++N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 ++T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 ++N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 ++T; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B2 B3 P1 V3 V6] # خ݈-.먿 ++N; \u062E\u0748񅪪-.\u200C먿; [B1 B2 B3 C1 P1 V3 V6]; [B1 B2 B3 C1 P1 V3 V6] # خ݈-.먿 ++B; xn----dnc06f42153a.xn--v22b; [B2 B3 V3 V6]; [B2 B3 V3 V6] # خ݈-.먿 ++B; xn----dnc06f42153a.xn--0ug1581d; [B1 B2 B3 C1 V3 V6]; [B1 B2 B3 C1 V3 V6] # خ݈-.먿 ++B; 􋿦。ᠽ; [P1 V6]; [P1 V6] ++B; 􋿦。ᠽ; [P1 V6]; [P1 V6] ++B; xn--j890g.xn--w7e; [V6]; [V6] ++T; 嬃𝍌.\u200D\u0B44; [C2]; [V5] # 嬃𝍌.ୄ 
++N; 嬃𝍌.\u200D\u0B44; [C2]; [C2] # 嬃𝍌.ୄ ++T; 嬃𝍌.\u200D\u0B44; [C2]; [V5] # 嬃𝍌.ୄ ++N; 嬃𝍌.\u200D\u0B44; [C2]; [C2] # 嬃𝍌.ୄ ++B; xn--b6s0078f.xn--0ic; [V5]; [V5] # 嬃𝍌.ୄ ++B; xn--b6s0078f.xn--0ic557h; [C2]; [C2] # 嬃𝍌.ୄ ++B; \u0602𝌪≯.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. ++B; \u0602𝌪>\u0338.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. ++B; \u0602𝌪≯.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. ++B; \u0602𝌪>\u0338.𚋲򵁨; [B1 P1 V6]; [B1 P1 V6] # 𝌪≯. ++B; xn--kfb866llx01a.xn--wp1gm3570b; [B1 V6]; [B1 V6] # 𝌪≯. ++B; 򫾥\u08B7\u17CC\uA9C0.𞼠; [B5 P1 V6]; [B5 P1 V6] # ࢷ៌꧀. ++B; xn--dzb638ewm4i1iy1h.xn--3m7h; [B5 V6]; [B5 V6] # ࢷ៌꧀. ++T; \u200C.񟛤; [C1 P1 V6]; [P1 V6 A4_2] # . ++N; \u200C.񟛤; [C1 P1 V6]; [C1 P1 V6] # . ++B; .xn--q823a; [V6 A4_2]; [V6 A4_2] ++B; xn--0ug.xn--q823a; [C1 V6]; [C1 V6] # . ++B; 򺛕Ⴃ䠅.𐸑; [P1 V6]; [P1 V6] ++B; 򺛕Ⴃ䠅.𐸑; [P1 V6]; [P1 V6] ++B; 򺛕ⴃ䠅.𐸑; [P1 V6]; [P1 V6] ++B; xn--ukju77frl47r.xn--yl0d; [V6]; [V6] ++B; xn--bnd074zr557n.xn--yl0d; [V6]; [V6] ++B; 򺛕ⴃ䠅.𐸑; [P1 V6]; [P1 V6] ++B; \u1BF1𐹳𐹵𞤚。𝟨Ⴅ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᯱ𐹳𐹵𞤼.6Ⴅ ++B; \u1BF1𐹳𐹵𞤚。6Ⴅ; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ᯱ𐹳𐹵𞤼.6Ⴅ ++B; \u1BF1𐹳𐹵𞤼。6ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ ++B; xn--zzfy954hga2415t.xn--6-kvs; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ ++B; xn--zzfy954hga2415t.xn--6-h0g; [B1 V5 V6]; [B1 V5 V6] # ᯱ𐹳𐹵𞤼.6Ⴅ ++B; \u1BF1𐹳𐹵𞤼。𝟨ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ ++B; \u1BF1𐹳𐹵𞤚。6ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ ++B; \u1BF1𐹳𐹵𞤚。𝟨ⴅ; [B1 V5]; [B1 V5] # ᯱ𐹳𐹵𞤼.6ⴅ ++B; -。︒; [P1 V3 V6]; [P1 V3 V6] ++B; -。。; [V3 A4_2]; [V3 A4_2] ++B; -..; [V3 A4_2]; [V3 A4_2] ++B; -.xn--y86c; [V3 V6]; [V3 V6] ++B; \u07DBჀ。-⁵--; [B1 B2 B3 P1 V2 V3 V6]; [B1 B2 B3 P1 V2 V3 V6] # ߛჀ.-5-- ++B; \u07DBჀ。-5--; [B1 B2 B3 P1 V2 V3 V6]; [B1 B2 B3 P1 V2 V3 V6] # ߛჀ.-5-- ++B; \u07DBⴠ。-5--; [B1 B2 B3 V2 V3]; [B1 B2 B3 V2 V3] # ߛⴠ.-5-- ++B; xn--2sb691q.-5--; [B1 B2 B3 V2 V3]; [B1 B2 B3 V2 V3] # ߛⴠ.-5-- ++B; xn--2sb866b.-5--; [B1 B2 B3 V2 V3 V6]; [B1 B2 B3 V2 V3 V6] # ߛჀ.-5-- ++B; \u07DBⴠ。-⁵--; [B1 B2 B3 V2 V3]; [B1 B2 B3 V2 V3] # ߛⴠ.-5-- ++B; ≯\uD8DD󠑕。𐹷𐹻≯𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # 
≯.𐹷𐹻≯ ++B; >\u0338\uD8DD󠑕。𐹷𐹻>\u0338𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ ++B; ≯\uD8DD󠑕。𐹷𐹻≯𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ ++B; >\u0338\uD8DD󠑕。𐹷𐹻>\u0338𐷒; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ ++B; ≯\uD8DD󠑕.xn--hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ ++B; >\u0338\uD8DD󠑕.xn--hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ ++B; >\u0338\uD8DD󠑕.XN--HDH8283GDOAQA; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ ++B; ≯\uD8DD󠑕.XN--HDH8283GDOAQA; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ ++B; ≯\uD8DD󠑕.Xn--Hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ ++B; >\u0338\uD8DD󠑕.Xn--Hdh8283gdoaqa; [B1 P1 V6]; [B1 P1 V6 A3] # ≯.𐹷𐹻≯ ++T; ㍔\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ルーブルࣦݼ.͆ ++N; ㍔\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ルーブルࣦݼ.͆ ++T; ルーブル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ルーブルࣦݼ.͆ ++N; ルーブル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ルーブルࣦݼ.͆ ++T; ルーフ\u3099ル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ルーブルࣦݼ.͆ ++N; ルーフ\u3099ル\u08E6\u077C\u200D。\u0346򁳊𝅶\u0604; [B1 B5 B6 C2 P1 V5 V6]; [B1 B5 B6 C2 P1 V5 V6] # ルーブルࣦݼ.͆ ++B; xn--dqb73el09fncab4h.xn--kua81ls548d3608b; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ルーブルࣦݼ.͆ ++B; xn--dqb73ec22c9kp8cb1j.xn--kua81ls548d3608b; [B1 B5 B6 C2 V5 V6]; [B1 B5 B6 C2 V5 V6] # ルーブルࣦݼ.͆ ++T; \u200D.F; [C2]; [A4_2] # .f ++N; \u200D.F; [C2]; [C2] # .f ++T; \u200D.f; [C2]; [A4_2] # .f ++N; \u200D.f; [C2]; [C2] # .f ++B; .f; [A4_2]; [A4_2] ++B; xn--1ug.f; [C2]; [C2] # .f ++B; f; ; ++T; \u200D㨲。ß; [C2]; xn--9bm.ss # 㨲.ß ++N; \u200D㨲。ß; [C2]; [C2] # 㨲.ß ++T; \u200D㨲。ß; [C2]; xn--9bm.ss # 㨲.ß ++N; \u200D㨲。ß; [C2]; [C2] # 㨲.ß ++T; \u200D㨲。SS; [C2]; xn--9bm.ss # 㨲.ss ++N; \u200D㨲。SS; [C2]; [C2] # 㨲.ss ++T; \u200D㨲。ss; [C2]; xn--9bm.ss # 㨲.ss ++N; \u200D㨲。ss; [C2]; [C2] # 㨲.ss ++T; \u200D㨲。Ss; [C2]; xn--9bm.ss # 㨲.ss ++N; \u200D㨲。Ss; [C2]; [C2] # 㨲.ss ++B; xn--9bm.ss; 
㨲.ss; xn--9bm.ss ++B; 㨲.ss; ; xn--9bm.ss ++B; 㨲.SS; 㨲.ss; xn--9bm.ss ++B; 㨲.Ss; 㨲.ss; xn--9bm.ss ++B; xn--1ug914h.ss; [C2]; [C2] # 㨲.ss ++B; xn--1ug914h.xn--zca; [C2]; [C2] # 㨲.ß ++T; \u200D㨲。SS; [C2]; xn--9bm.ss # 㨲.ss ++N; \u200D㨲。SS; [C2]; [C2] # 㨲.ss ++T; \u200D㨲。ss; [C2]; xn--9bm.ss # 㨲.ss ++N; \u200D㨲。ss; [C2]; [C2] # 㨲.ss ++T; \u200D㨲。Ss; [C2]; xn--9bm.ss # 㨲.ss ++N; \u200D㨲。Ss; [C2]; [C2] # 㨲.ss ++B; \u0605\u067E。\u08A8; [B1 P1 V6]; [B1 P1 V6] # پ.ࢨ ++B; \u0605\u067E。\u08A8; [B1 P1 V6]; [B1 P1 V6] # پ.ࢨ ++B; xn--nfb6v.xn--xyb; [B1 V6]; [B1 V6] # پ.ࢨ ++B; ⾑\u0753𞤁。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ ++B; 襾\u0753𞤁。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ ++B; 襾\u0753𞤣。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ ++B; xn--6ob9577deqwl.xn--7ib5526k; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ ++B; ⾑\u0753𞤣。𐹵\u0682; [B1 B5 B6]; [B1 B5 B6] # 襾ݓ𞤣.𐹵ڂ ++T; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ ++N; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ ++T; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ ++N; 񦴻ς-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ς-⃫.ݔ-ꡛ ++B; 񦴻Σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ ++B; 񦴻σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ ++B; xn----zmb705tuo34l.xn----53c4874j; [B2 B3 B6 V6]; [B2 B3 B6 V6] # σ-⃫.ݔ-ꡛ ++B; xn----xmb015tuo34l.xn----53c4874j; [B2 B3 B6 V6]; [B2 B3 B6 V6] # ς-⃫.ݔ-ꡛ ++B; 񦴻Σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ ++B; 񦴻σ-\u20EB。\u0754-ꡛ; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # σ-⃫.ݔ-ꡛ ++T; \u200D.􀸨; [C2 P1 V6]; [P1 V6 A4_2] # . ++N; \u200D.􀸨; [C2 P1 V6]; [C2 P1 V6] # . ++T; \u200D.􀸨; [C2 P1 V6]; [P1 V6 A4_2] # . ++N; \u200D.􀸨; [C2 P1 V6]; [C2 P1 V6] # . ++B; .xn--h327f; [V6 A4_2]; [V6 A4_2] ++B; xn--1ug.xn--h327f; [C2 V6]; [C2 V6] # . 
++B; 񣭻񌥁。≠𝟲; [P1 V6]; [P1 V6] ++B; 񣭻񌥁。=\u0338𝟲; [P1 V6]; [P1 V6] ++B; 񣭻񌥁。≠6; [P1 V6]; [P1 V6] ++B; 񣭻񌥁。=\u03386; [P1 V6]; [P1 V6] ++B; xn--h79w4z99a.xn--6-tfo; [V6]; [V6] ++T; 󠅊ᡭ\u200D.𐥡; [B6 C2 P1 V6]; [P1 V6] # ᡭ. ++N; 󠅊ᡭ\u200D.𐥡; [B6 C2 P1 V6]; [B6 C2 P1 V6] # ᡭ. ++B; xn--98e.xn--om9c; [V6]; [V6] ++B; xn--98e810b.xn--om9c; [B6 C2 V6]; [B6 C2 V6] # ᡭ. ++B; \u0C40\u0855𐥛𑄴.󭰵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ీࡕ𑄴. ++B; \u0C40\u0855𐥛𑄴.󭰵; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ీࡕ𑄴. ++B; xn--kwb91r5112avtg.xn--o580f; [B1 V5 V6]; [B1 V5 V6] # ీࡕ𑄴. ++T; 𞤮。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ ++N; 𞤮。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ ++T; 𞤮。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ ++N; 𞤮。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ ++T; 𞤌。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ ++N; 𞤌。𑇊\u200C>\u0338\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ ++T; 𞤌。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤮.𑇊≯᳦ ++N; 𞤌。𑇊\u200C≯\u1CE6; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𞤮.𑇊≯᳦ ++B; xn--me6h.xn--z6fz8ueq2v; [B1 V5 V6]; [B1 V5 V6] # 𞤮.𑇊≯᳦ ++B; xn--me6h.xn--z6f16kn9b2642b; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𞤮.𑇊≯᳦ ++B; 󠄀𝟕.𞤌񛗓Ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] ++B; 󠄀7.𞤌񛗓Ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] ++B; 󠄀7.𞤮񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] ++B; 7.xn--0kjz523lv1vv; [B1 B2 B3 V6]; [B1 B2 B3 V6] ++B; 7.xn--hnd3403vv1vv; [B1 B2 B3 V6]; [B1 B2 B3 V6] ++B; 󠄀𝟕.𞤮񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] ++B; 󠄀7.𞤌񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] ++B; 󠄀𝟕.𞤌񛗓ⴉ; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] ++B; 閃9𝩍。Ↄ\u0669\u08B1\u0B4D; [B5 B6 P1 V6]; [B5 B6 P1 V6] # 閃9𝩍.Ↄ٩ࢱ୍ ++B; 閃9𝩍。ↄ\u0669\u08B1\u0B4D; [B5 B6]; [B5 B6] # 閃9𝩍.ↄ٩ࢱ୍ ++B; xn--9-3j6dk517f.xn--iib28ij3c4t9a; [B5 B6]; [B5 B6] # 閃9𝩍.ↄ٩ࢱ୍ ++B; xn--9-3j6dk517f.xn--iib28ij3c0t9a; [B5 B6 V6]; [B5 B6 V6] # 閃9𝩍.Ↄ٩ࢱ୍ ++B; \uAAF6ᢏ\u0E3A2.𐋢\u0745\u0F9F︒; [P1 V5 V6]; [P1 V5 V6] # ꫶ᢏฺ2.𐋢݅ྟ︒ ++B; \uAAF6ᢏ\u0E3A2.𐋢\u0745\u0F9F。; [V5]; [V5] # 
꫶ᢏฺ2.𐋢݅ྟ. ++B; xn--2-2zf840fk16m.xn--sob093b2m7s.; [V5]; [V5] # ꫶ᢏฺ2.𐋢݅ྟ. ++B; xn--2-2zf840fk16m.xn--sob093bj62sz9d; [V5 V6]; [V5 V6] # ꫶ᢏฺ2.𐋢݅ྟ︒ ++B; 󅴧。≠-󠙄⾛; [P1 V6]; [P1 V6] ++B; 󅴧。=\u0338-󠙄⾛; [P1 V6]; [P1 V6] ++B; 󅴧。≠-󠙄走; [P1 V6]; [P1 V6] ++B; 󅴧。=\u0338-󠙄走; [P1 V6]; [P1 V6] ++B; xn--gm57d.xn----tfo4949b3664m; [V6]; [V6] ++B; \u076E\u0604Ⴊ。-≠\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮႪ.-≠ ++B; \u076E\u0604Ⴊ。-=\u0338\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮႪ.-≠ ++B; \u076E\u0604ⴊ。-=\u0338\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮⴊ.-≠ ++B; \u076E\u0604ⴊ。-≠\u1160; [B1 B2 B3 P1 V3 V6]; [B1 B2 B3 P1 V3 V6] # ݮⴊ.-≠ ++B; xn--mfb73ek93f.xn----5bh589i; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ݮⴊ.-≠ ++B; xn--mfb73ex6r.xn----5bh589i; [B1 B2 B3 V3 V6]; [B1 B2 B3 V3 V6] # ݮႪ.-≠ ++T; \uFB4F𐹧𝟒≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. ++N; \uFB4F𐹧𝟒≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. ++T; \uFB4F𐹧𝟒>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. ++N; \uFB4F𐹧𝟒>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. ++T; \u05D0\u05DC𐹧4≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. ++N; \u05D0\u05DC𐹧4≯。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. ++T; \u05D0\u05DC𐹧4>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B3 B4 P1 V6] # אל𐹧4≯. ++N; \u05D0\u05DC𐹧4>\u0338。\u200C; [B1 B3 B4 C1 P1 V6]; [B1 B3 B4 C1 P1 V6] # אל𐹧4≯. ++B; xn--4-zhc0by36txt0w.; [B3 B4 V6]; [B3 B4 V6] # אל𐹧4≯. ++B; xn--4-zhc0by36txt0w.xn--0ug; [B1 B3 B4 C1 V6]; [B1 B3 B4 C1 V6] # אל𐹧4≯. 
++B; 𝟎。甯; 0.甯; 0.xn--qny ++B; 0。甯; 0.甯; 0.xn--qny ++B; 0.xn--qny; 0.甯; 0.xn--qny ++B; 0.甯; ; 0.xn--qny ++B; -⾆.\uAAF6; [V3 V5]; [V3 V5] # -舌.꫶ ++B; -舌.\uAAF6; [V3 V5]; [V3 V5] # -舌.꫶ ++B; xn----ef8c.xn--2v9a; [V3 V5]; [V3 V5] # -舌.꫶ ++B; -。ᢘ; [V3]; [V3] ++B; -。ᢘ; [V3]; [V3] ++B; -.xn--ibf; [V3]; [V3] ++B; 🂴Ⴋ.≮; [P1 V6]; [P1 V6] ++B; 🂴Ⴋ.<\u0338; [P1 V6]; [P1 V6] ++B; 🂴ⴋ.<\u0338; [P1 V6]; [P1 V6] ++B; 🂴ⴋ.≮; [P1 V6]; [P1 V6] ++B; xn--2kj7565l.xn--gdh; [V6]; [V6] ++B; xn--jnd1986v.xn--gdh; [V6]; [V6] ++T; 璼𝨭。\u200C󠇟; [C1]; xn--gky8837e. # 璼𝨭. ++N; 璼𝨭。\u200C󠇟; [C1]; [C1] # 璼𝨭. ++T; 璼𝨭。\u200C󠇟; [C1]; xn--gky8837e. # 璼𝨭. ++N; 璼𝨭。\u200C󠇟; [C1]; [C1] # 璼𝨭. ++B; xn--gky8837e.; 璼𝨭.; xn--gky8837e. ++B; 璼𝨭.; ; xn--gky8837e. ++B; xn--gky8837e.xn--0ug; [C1]; [C1] # 璼𝨭. ++B; \u06698񂍽。-5🞥; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٩8.-5🞥 ++B; \u06698񂍽。-5🞥; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ٩8.-5🞥 ++B; xn--8-qqc97891f.xn---5-rp92a; [B1 V3 V6]; [B1 V3 V6] # ٩8.-5🞥 ++T; \u200C.\u200C; [C1]; [A4_2] # . ++N; \u200C.\u200C; [C1]; [C1] # . ++B; xn--0ug.xn--0ug; [C1]; [C1] # . 
++T; \u200D튛.\u0716; [B1 C2]; xn--157b.xn--gnb # 튛.ܖ ++N; \u200D튛.\u0716; [B1 C2]; [B1 C2] # 튛.ܖ ++T; \u200D튛.\u0716; [B1 C2]; xn--157b.xn--gnb # 튛.ܖ ++N; \u200D튛.\u0716; [B1 C2]; [B1 C2] # 튛.ܖ ++B; xn--157b.xn--gnb; 튛.\u0716; xn--157b.xn--gnb # 튛.ܖ ++B; 튛.\u0716; ; xn--157b.xn--gnb # 튛.ܖ ++B; 튛.\u0716; 튛.\u0716; xn--157b.xn--gnb # 튛.ܖ ++B; xn--1ug4441e.xn--gnb; [B1 C2]; [B1 C2] # 튛.ܖ ++B; ᡋ𐹰𞽳.\u0779ⴞ; [B2 B3 B5 B6 P1 V6]; [B2 B3 B5 B6 P1 V6] # ᡋ𐹰.ݹⴞ ++B; ᡋ𐹰𞽳.\u0779Ⴞ; [B2 B3 B5 B6 P1 V6]; [B2 B3 B5 B6 P1 V6] # ᡋ𐹰.ݹႾ ++B; xn--b8e0417jocvf.xn--9pb068b; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # ᡋ𐹰.ݹႾ ++B; xn--b8e0417jocvf.xn--9pb883q; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # ᡋ𐹰.ݹⴞ ++B; 𐷃\u0662𝅻𝟧.𐹮𐹬Ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬Ⴇ ++B; 𐷃\u0662𝅻5.𐹮𐹬Ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬Ⴇ ++B; 𐷃\u0662𝅻5.𐹮𐹬ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬ⴇ ++B; xn--5-cqc8833rhv7f.xn--ykjz523efa; [B1 B4 V6]; [B1 B4 V6] # ٢𝅻5.𐹮𐹬ⴇ ++B; xn--5-cqc8833rhv7f.xn--fnd3401kfa; [B1 B4 V6]; [B1 B4 V6] # ٢𝅻5.𐹮𐹬Ⴇ ++B; 𐷃\u0662𝅻𝟧.𐹮𐹬ⴇ; [B1 B4 P1 V6]; [B1 B4 P1 V6] # ٢𝅻5.𐹮𐹬ⴇ ++B; Ⴗ.\u05C2𑄴\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # Ⴗ.𑄴ׂꦷ ++B; Ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # Ⴗ.𑄴ׂꦷ ++B; Ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # Ⴗ.𑄴ׂꦷ ++B; ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # ⴗ.𑄴ׂꦷ ++B; xn--flj.xn--qdb0605f14ycrms3c; [V5 V6]; [V5 V6] # ⴗ.𑄴ׂꦷ ++B; xn--vnd.xn--qdb0605f14ycrms3c; [V5 V6]; [V5 V6] # Ⴗ.𑄴ׂꦷ ++B; ⴗ.𑄴\u05C2\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # ⴗ.𑄴ׂꦷ ++B; ⴗ.\u05C2𑄴\uA9B7񘃨; [P1 V5 V6]; [P1 V5 V6] # ⴗ.𑄴ׂꦷ ++B; 𝟾𾤘.򇕛\u066C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 8.٬ ++B; 8𾤘.򇕛\u066C; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # 8.٬ ++B; xn--8-kh23b.xn--lib78461i; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 8.٬ ++B; ⒈酫︒。\u08D6; [P1 V5 V6]; [P1 V5 V6] # ⒈酫︒.ࣖ ++B; 1.酫。。\u08D6; [V5 A4_2]; [V5 A4_2] # 1.酫..ࣖ ++B; 1.xn--8j4a..xn--8zb; [V5 A4_2]; [V5 A4_2] # 1.酫..ࣖ ++B; xn--tsh4490bfe8c.xn--8zb; [V5 V6]; [V5 V6] # ⒈酫︒.ࣖ ++T; \u2DE3\u200C≮\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [P1 V5 V6] # ⷣ≮ᩫ.ฺ ++N; 
\u2DE3\u200C≮\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⷣ≮ᩫ.ฺ ++T; \u2DE3\u200C<\u0338\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [P1 V5 V6] # ⷣ≮ᩫ.ฺ ++N; \u2DE3\u200C<\u0338\u1A6B.\u200C\u0E3A; [C1 P1 V5 V6]; [C1 P1 V5 V6] # ⷣ≮ᩫ.ฺ ++B; xn--uof548an0j.xn--o4c; [V5 V6]; [V5 V6] # ⷣ≮ᩫ.ฺ ++B; xn--uof63xk4bf3s.xn--o4c732g; [C1 V5 V6]; [C1 V5 V6] # ⷣ≮ᩫ.ฺ ++T; 𞪂。ႷႽ¹\u200D; [B6 C2 P1 V6]; [P1 V6] # .ႷႽ1 ++N; 𞪂。ႷႽ¹\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ႷႽ1 ++T; 𞪂。ႷႽ1\u200D; [B6 C2 P1 V6]; [P1 V6] # .ႷႽ1 ++N; 𞪂。ႷႽ1\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ႷႽ1 ++T; 𞪂。ⴗⴝ1\u200D; [B6 C2 P1 V6]; [P1 V6] # .ⴗⴝ1 ++N; 𞪂。ⴗⴝ1\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ⴗⴝ1 ++T; 𞪂。Ⴗⴝ1\u200D; [B6 C2 P1 V6]; [P1 V6] # .Ⴗⴝ1 ++N; 𞪂。Ⴗⴝ1\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .Ⴗⴝ1 ++B; xn--co6h.xn--1-h1g429s; [V6]; [V6] ++B; xn--co6h.xn--1-h1g398iewm; [B6 C2 V6]; [B6 C2 V6] # .Ⴗⴝ1 ++B; xn--co6h.xn--1-kwssa; [V6]; [V6] ++B; xn--co6h.xn--1-ugn710dya; [B6 C2 V6]; [B6 C2 V6] # .ⴗⴝ1 ++B; xn--co6h.xn--1-h1gs; [V6]; [V6] ++B; xn--co6h.xn--1-h1gs597m; [B6 C2 V6]; [B6 C2 V6] # .ႷႽ1 ++T; 𞪂。ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [P1 V6] # .ⴗⴝ1 ++N; 𞪂。ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .ⴗⴝ1 ++T; 𞪂。Ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [P1 V6] # .Ⴗⴝ1 ++N; 𞪂。Ⴗⴝ¹\u200D; [B6 C2 P1 V6]; [B6 C2 P1 V6] # .Ⴗⴝ1 ++B; 𑄴𑄳2.𞳿󠀳-; [B1 B3 P1 V3 V5 V6]; [B1 B3 P1 V3 V5 V6] ++B; xn--2-h87ic.xn----s39r33498d; [B1 B3 V3 V5 V6]; [B1 B3 V3 V5 V6] ++B; 󠕲󟶶\u0665。񀁁𑄳𞤃\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ ++B; 󠕲󟶶\u0665。񀁁𑄳𞤃\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ ++B; 󠕲󟶶\u0665。񀁁𑄳𞤥\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ ++B; xn--eib57614py3ea.xn--9mb5737kqnpfzkwr; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ٥.𑄳𞤥ܐ ++B; 󠕲󟶶\u0665。񀁁𑄳𞤥\u0710; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ٥.𑄳𞤥ܐ ++T; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 ++N; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 ++T; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 ++N; \u0720򲠽𐹢\u17BB。ςᢈ🝭\u200C; [B2 
B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.ςᢈ🝭 ++T; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 ++N; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 ++T; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 ++N; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 ++B; xn--qnb616fis0qzt36f.xn--4xa847hli46a; [B2 B6 V6]; [B2 B6 V6] # ܠ𐹢ុ.σᢈ🝭 ++B; xn--qnb616fis0qzt36f.xn--4xa847h6ofgl44c; [B2 B6 C1 V6]; [B2 B6 C1 V6] # ܠ𐹢ុ.σᢈ🝭 ++B; xn--qnb616fis0qzt36f.xn--3xa057h6ofgl44c; [B2 B6 C1 V6]; [B2 B6 C1 V6] # ܠ𐹢ុ.ςᢈ🝭 ++T; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 ++N; \u0720򲠽𐹢\u17BB。Σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 ++T; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 P1 V6] # ܠ𐹢ុ.σᢈ🝭 ++N; \u0720򲠽𐹢\u17BB。σᢈ🝭\u200C; [B2 B6 C1 P1 V6]; [B2 B6 C1 P1 V6] # ܠ𐹢ុ.σᢈ🝭 ++T; \u200D--≮。𐹧; [B1 C2 P1 V6]; [B1 P1 V3 V6] # --≮.𐹧 ++N; \u200D--≮。𐹧; [B1 C2 P1 V6]; [B1 C2 P1 V6] # --≮.𐹧 ++T; \u200D--<\u0338。𐹧; [B1 C2 P1 V6]; [B1 P1 V3 V6] # --≮.𐹧 ++N; \u200D--<\u0338。𐹧; [B1 C2 P1 V6]; [B1 C2 P1 V6] # --≮.𐹧 ++B; xn-----ujv.xn--fo0d; [B1 V3 V6]; [B1 V3 V6] ++B; xn-----l1tz1k.xn--fo0d; [B1 C2 V6]; [B1 C2 V6] # --≮.𐹧 ++B; \uA806。𻚏\u0FB0⒕; [P1 V5 V6]; [P1 V5 V6] # ꠆.ྰ⒕ ++B; \uA806。𻚏\u0FB014.; [P1 V5 V6]; [P1 V5 V6] # ꠆.ྰ14. ++B; xn--l98a.xn--14-jsj57880f.; [V5 V6]; [V5 V6] # ꠆.ྰ14. 
++B; xn--l98a.xn--dgd218hhp28d; [V5 V6]; [V5 V6] # ꠆.ྰ⒕ ++B; 򮉂\u06BC.𑆺\u0669; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ڼ.𑆺٩ ++B; 򮉂\u06BC.𑆺\u0669; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # ڼ.𑆺٩ ++B; xn--vkb92243l.xn--iib9797k; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # ڼ.𑆺٩ ++B; 󠁎\u06D0-。𞤴; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ې-.𞤴 ++B; 󠁎\u06D0-。𞤒; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ې-.𞤴 ++B; xn----mwc72685y.xn--se6h; [B1 V3 V6]; [B1 V3 V6] # ې-.𞤴 ++T; 𝟠4󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; xn--84-s850a.xn--59h6326e # 84𝈻.𐋵⛧ ++N; 𝟠4󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; [C2] # 84𝈻.𐋵⛧ ++T; 84󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; xn--84-s850a.xn--59h6326e # 84𝈻.𐋵⛧ ++N; 84󠇗𝈻.\u200D𐋵⛧\u200D; [C2]; [C2] # 84𝈻.𐋵⛧ ++B; xn--84-s850a.xn--59h6326e; 84𝈻.𐋵⛧; xn--84-s850a.xn--59h6326e; NV8 ++B; 84𝈻.𐋵⛧; ; xn--84-s850a.xn--59h6326e; NV8 ++B; xn--84-s850a.xn--1uga573cfq1w; [C2]; [C2] # 84𝈻.𐋵⛧ ++B; -\u0601。ᡪ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.ᡪ ++B; -\u0601。ᡪ; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.ᡪ ++B; xn----tkc.xn--68e; [B1 V3 V6]; [B1 V3 V6] # -.ᡪ ++T; ≮𝟕.謖ß≯; [P1 V6]; [P1 V6] ++N; ≮𝟕.謖ß≯; [P1 V6]; [P1 V6] ++T; <\u0338𝟕.謖ß>\u0338; [P1 V6]; [P1 V6] ++N; <\u0338𝟕.謖ß>\u0338; [P1 V6]; [P1 V6] ++T; ≮7.謖ß≯; [P1 V6]; [P1 V6] ++N; ≮7.謖ß≯; [P1 V6]; [P1 V6] ++T; <\u03387.謖ß>\u0338; [P1 V6]; [P1 V6] ++N; <\u03387.謖ß>\u0338; [P1 V6]; [P1 V6] ++B; <\u03387.謖SS>\u0338; [P1 V6]; [P1 V6] ++B; ≮7.謖SS≯; [P1 V6]; [P1 V6] ++B; ≮7.謖ss≯; [P1 V6]; [P1 V6] ++B; <\u03387.謖ss>\u0338; [P1 V6]; [P1 V6] ++B; <\u03387.謖Ss>\u0338; [P1 V6]; [P1 V6] ++B; ≮7.謖Ss≯; [P1 V6]; [P1 V6] ++B; xn--7-mgo.xn--ss-xjvv174c; [V6]; [V6] ++B; xn--7-mgo.xn--zca892oly5e; [V6]; [V6] ++B; <\u0338𝟕.謖SS>\u0338; [P1 V6]; [P1 V6] ++B; ≮𝟕.謖SS≯; [P1 V6]; [P1 V6] ++B; ≮𝟕.謖ss≯; [P1 V6]; [P1 V6] ++B; <\u0338𝟕.謖ss>\u0338; [P1 V6]; [P1 V6] ++B; <\u0338𝟕.謖Ss>\u0338; [P1 V6]; [P1 V6] ++B; ≮𝟕.謖Ss≯; [P1 V6]; [P1 V6] ++B; 朶Ⴉ𞪡.𝨽\u0825📻-; [B1 B5 B6 P1 V3 V5 V6]; [B1 B5 B6 P1 V3 V5 V6] # 朶Ⴉ.𝨽ࠥ📻- ++B; 朶ⴉ𞪡.𝨽\u0825📻-; [B1 B5 B6 P1 V3 V5 V6]; [B1 B5 B6 P1 V3 V5 V6] # 朶ⴉ.𝨽ࠥ📻- ++B; 
xn--0kjz47pd57t.xn----3gd37096apmwa; [B1 B5 B6 V3 V5 V6]; [B1 B5 B6 V3 V5 V6] # 朶ⴉ.𝨽ࠥ📻- ++B; xn--hnd7245bd56p.xn----3gd37096apmwa; [B1 B5 B6 V3 V5 V6]; [B1 B5 B6 V3 V5 V6] # 朶Ⴉ.𝨽ࠥ📻- ++T; 𐤎。󑿰\u200C≮\u200D; [B6 C1 C2 P1 V6]; [B6 P1 V6] # 𐤎.≮ ++N; 𐤎。󑿰\u200C≮\u200D; [B6 C1 C2 P1 V6]; [B6 C1 C2 P1 V6] # 𐤎.≮ ++T; 𐤎。󑿰\u200C<\u0338\u200D; [B6 C1 C2 P1 V6]; [B6 P1 V6] # 𐤎.≮ ++N; 𐤎。󑿰\u200C<\u0338\u200D; [B6 C1 C2 P1 V6]; [B6 C1 C2 P1 V6] # 𐤎.≮ ++B; xn--bk9c.xn--gdhx6802k; [B6 V6]; [B6 V6] ++B; xn--bk9c.xn--0ugc04p2u638c; [B6 C1 C2 V6]; [B6 C1 C2 V6] # 𐤎.≮ ++T; 񭜎⒈。\u200C𝟤; [C1 P1 V6]; [P1 V6] # ⒈.2 ++N; 񭜎⒈。\u200C𝟤; [C1 P1 V6]; [C1 P1 V6] # ⒈.2 ++T; 񭜎1.。\u200C2; [C1 P1 V6 A4_2]; [P1 V6 A4_2] # 1..2 ++N; 񭜎1.。\u200C2; [C1 P1 V6 A4_2]; [C1 P1 V6 A4_2] # 1..2 ++B; xn--1-ex54e..2; [V6 A4_2]; [V6 A4_2] ++B; xn--1-ex54e..xn--2-rgn; [C1 V6 A4_2]; [C1 V6 A4_2] # 1..2 ++B; xn--tsh94183d.2; [V6]; [V6] ++B; xn--tsh94183d.xn--2-rgn; [C1 V6]; [C1 V6] # ⒈.2 ++T; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹤.𐹳𐹶 ++N; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹤.𐹳𐹶 ++T; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 P1 V6] # 𐹤.𐹳𐹶 ++N; 󠟊𐹤\u200D.𐹳󙄵𐹶; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 𐹤.𐹳𐹶 ++B; xn--co0d98977c.xn--ro0dga22807v; [B1 V6]; [B1 V6] ++B; xn--1ugy994g7k93g.xn--ro0dga22807v; [B1 C2 V6]; [B1 C2 V6] # 𐹤.𐹳𐹶 ++B; 𞤴𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् ++B; 𞤴𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् ++B; 𞤒𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् ++B; xn--609c96c09grp2w.xn--n3b28708s; [B1 V5 V6]; [B1 V5 V6] # 𞤴𐹻𑓂𐭝.् ++B; 𞤒𐹻𑓂𐭝.\u094D\uFE07􉛯; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𞤴𐹻𑓂𐭝.् ++B; \u0668。𐹠𐹽񗮶; [B1 P1 V6]; [B1 P1 V6] # ٨.𐹠𐹽 ++B; \u0668。𐹠𐹽񗮶; [B1 P1 V6]; [B1 P1 V6] # ٨.𐹠𐹽 ++B; xn--hib.xn--7n0d2bu9196b; [B1 V6]; [B1 V6] # ٨.𐹠𐹽 ++B; \u1160񍀜.8򶾵\u069C; [B1 P1 V6]; [B1 P1 V6] # .8ڜ ++B; xn--psd85033d.xn--8-otc61545t; [B1 V6]; [B1 V6] # .8ڜ ++T; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [A4_2] # .ß𑓃 ++N; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [C1 C2] # .ß𑓃 ++T; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [A4_2] # 
.ß𑓃 ++N; \u200D\u200C󠆪。ß𑓃; [C1 C2]; [C1 C2] # .ß𑓃 ++T; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [A4_2] # .ss𑓃 ++N; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [C1 C2] # .ss𑓃 ++T; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 ++N; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 ++T; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 ++N; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 ++B; .xn--ss-bh7o; [A4_2]; [A4_2] ++B; xn--0ugb.xn--ss-bh7o; [C1 C2]; [C1 C2] # .ss𑓃 ++B; xn--0ugb.xn--zca0732l; [C1 C2]; [C1 C2] # .ß𑓃 ++T; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [A4_2] # .ss𑓃 ++N; \u200D\u200C󠆪。SS𑓃; [C1 C2]; [C1 C2] # .ss𑓃 ++T; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 ++N; \u200D\u200C󠆪。ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 ++T; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [A4_2] # .ss𑓃 ++N; \u200D\u200C󠆪。Ss𑓃; [C1 C2]; [C1 C2] # .ss𑓃 ++B; xn--ss-bh7o; ss𑓃; xn--ss-bh7o ++B; ss𑓃; ; xn--ss-bh7o ++B; SS𑓃; ss𑓃; xn--ss-bh7o ++B; Ss𑓃; ss𑓃; xn--ss-bh7o ++T; ︒\u200Cヶ䒩.ꡪ; [C1 P1 V6]; [P1 V6] # ︒ヶ䒩.ꡪ ++N; ︒\u200Cヶ䒩.ꡪ; [C1 P1 V6]; [C1 P1 V6] # ︒ヶ䒩.ꡪ ++T; 。\u200Cヶ䒩.ꡪ; [C1 A4_2]; [A4_2] # .ヶ䒩.ꡪ ++N; 。\u200Cヶ䒩.ꡪ; [C1 A4_2]; [C1 A4_2] # .ヶ䒩.ꡪ ++B; .xn--qekw60d.xn--gd9a; [A4_2]; [A4_2] ++B; .xn--0ug287dj0o.xn--gd9a; [C1 A4_2]; [C1 A4_2] # .ヶ䒩.ꡪ ++B; xn--qekw60dns9k.xn--gd9a; [V6]; [V6] ++B; xn--0ug287dj0or48o.xn--gd9a; [C1 V6]; [C1 V6] # ︒ヶ䒩.ꡪ ++B; xn--qekw60d.xn--gd9a; ヶ䒩.ꡪ; xn--qekw60d.xn--gd9a ++B; ヶ䒩.ꡪ; ; xn--qekw60d.xn--gd9a ++T; \u200C⒈𤮍.󢓋\u1A60; [C1 P1 V6]; [P1 V6] # ⒈𤮍.᩠ ++N; \u200C⒈𤮍.󢓋\u1A60; [C1 P1 V6]; [C1 P1 V6] # ⒈𤮍.᩠ ++T; \u200C1.𤮍.󢓋\u1A60; [C1 P1 V6]; [P1 V6] # 1.𤮍.᩠ ++N; \u200C1.𤮍.󢓋\u1A60; [C1 P1 V6]; [C1 P1 V6] # 1.𤮍.᩠ ++B; 1.xn--4x6j.xn--jof45148n; [V6]; [V6] # 1.𤮍.᩠ ++B; xn--1-rgn.xn--4x6j.xn--jof45148n; [C1 V6]; [C1 V6] # 1.𤮍.᩠ ++B; xn--tshw462r.xn--jof45148n; [V6]; [V6] # ⒈𤮍.᩠ ++B; xn--0ug88o7471d.xn--jof45148n; [C1 V6]; [C1 V6] # ⒈𤮍.᩠ ++T; ⒈\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ⒈𐫓.᩠ ++N; ⒈\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 C1 C2 P1 V5 V6] # ⒈𐫓.᩠ ++T; 1.\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 B3 P1 V5 
V6] # 1.𐫓.᩠ ++N; 1.\u200C𐫓󠀺。\u1A60񤰵\u200D; [B1 C1 C2 P1 V5 V6]; [B1 C1 C2 P1 V5 V6] # 1.𐫓.᩠ ++B; 1.xn--8w9c40377c.xn--jofz5294e; [B1 B3 V5 V6]; [B1 B3 V5 V6] # 1.𐫓.᩠ ++B; 1.xn--0ug8853gk263g.xn--jof95xex98m; [B1 C1 C2 V5 V6]; [B1 C1 C2 V5 V6] # 1.𐫓.᩠ ++B; xn--tsh4435fk263g.xn--jofz5294e; [B1 V5 V6]; [B1 V5 V6] # ⒈𐫓.᩠ ++B; xn--0ug78ol75wzcx4i.xn--jof95xex98m; [B1 C1 C2 V5 V6]; [B1 C1 C2 V5 V6] # ⒈𐫓.᩠ ++B; 𝅵。𝟫𞀈䬺⒈; [P1 V6]; [P1 V6] ++B; 𝅵。9𞀈䬺1.; [P1 V6]; [P1 V6] ++B; xn--3f1h.xn--91-030c1650n.; [V6]; [V6] ++B; xn--3f1h.xn--9-ecp936non25a; [V6]; [V6] ++B; 򡼺≯。盚\u0635; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ≯.盚ص ++B; 򡼺>\u0338。盚\u0635; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ≯.盚ص ++B; xn--hdh30181h.xn--0gb7878c; [B5 B6 V6]; [B5 B6 V6] # ≯.盚ص ++B; -񿰭\u05B4。-󠁊𐢸≯; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ִ.-≯ ++B; -񿰭\u05B4。-󠁊𐢸>\u0338; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ִ.-≯ ++B; xn----fgc06667m.xn----pgoy615he5y4i; [B1 V3 V6]; [B1 V3 V6] # -ִ.-≯ ++T; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 P1 V6] # ᭄੍.𐭛 ++N; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ᭄੍.𐭛 ++T; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 P1 V6] # ᭄੍.𐭛 ++N; 󿭓\u1B44\u200C\u0A4D.𐭛񳋔; [B2 B3 B6 P1 V6]; [B2 B3 B6 P1 V6] # ᭄੍.𐭛 ++B; xn--ybc997fb5881a.xn--409c6100y; [B2 B3 V6]; [B2 B3 V6] # ᭄੍.𐭛 ++B; xn--ybc997f6rd2n772c.xn--409c6100y; [B2 B3 B6 V6]; [B2 B3 B6 V6] # ᭄੍.𐭛 ++T; ⾇.\u067D𞤴\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ ++N; ⾇.\u067D𞤴\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ ++T; 舛.\u067D𞤴\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ ++N; 舛.\u067D𞤴\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ ++T; 舛.\u067D𞤒\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ ++N; 舛.\u067D𞤒\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ ++B; xn--8c1a.xn--2ib8jn539l; 舛.\u067D𞤴\u06BB; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ ++B; 舛.\u067D𞤴\u06BB; ; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ ++B; 舛.\u067D𞤒\u06BB; 舛.\u067D𞤴\u06BB; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ ++B; xn--8c1a.xn--2ib8jv19e6413b; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ ++T; 
⾇.\u067D𞤒\u06BB\u200D; [B3 C2]; xn--8c1a.xn--2ib8jn539l # 舛.ٽ𞤴ڻ ++N; ⾇.\u067D𞤒\u06BB\u200D; [B3 C2]; [B3 C2] # 舛.ٽ𞤴ڻ ++B; 4򭆥。\u0767≯; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 4.ݧ≯ ++B; 4򭆥。\u0767>\u0338; [B1 B3 P1 V6]; [B1 B3 P1 V6] # 4.ݧ≯ ++B; xn--4-xn17i.xn--rpb459k; [B1 B3 V6]; [B1 B3 V6] # 4.ݧ≯ ++B; 𲔏𞫨񺿂硲.\u06AD; [B5 P1 V6]; [B5 P1 V6] # 硲.ڭ ++B; 𲔏𞫨񺿂硲.\u06AD; [B5 P1 V6]; [B5 P1 V6] # 硲.ڭ ++B; xn--lcz1610fn78gk609a.xn--gkb; [B5 V6]; [B5 V6] # 硲.ڭ ++T; \u200C.\uFE08\u0666Ⴆ℮; [B1 C1 P1 V6]; [B1 P1 V6 A4_2] # .٦Ⴆ℮ ++N; \u200C.\uFE08\u0666Ⴆ℮; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .٦Ⴆ℮ ++T; \u200C.\uFE08\u0666ⴆ℮; [B1 C1]; [B1 A4_2] # .٦ⴆ℮ ++N; \u200C.\uFE08\u0666ⴆ℮; [B1 C1]; [B1 C1] # .٦ⴆ℮ ++B; .xn--fib628k4li; [B1 A4_2]; [B1 A4_2] # .٦ⴆ℮ ++B; xn--0ug.xn--fib628k4li; [B1 C1]; [B1 C1] # .٦ⴆ℮ ++B; .xn--fib263c0yn; [B1 V6 A4_2]; [B1 V6 A4_2] # .٦Ⴆ℮ ++B; xn--0ug.xn--fib263c0yn; [B1 C1 V6]; [B1 C1 V6] # .٦Ⴆ℮ ++T; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ ++N; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ ++T; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ ++N; \u06A3.\u0D4D\u200DϞ; [B1 V5]; [B1 V5] # ڣ.്ϟ ++T; \u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ ++N; \u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ ++B; xn--5jb.xn--xya149b; [B1 V5]; [B1 V5] # ڣ.്ϟ ++B; xn--5jb.xn--xya149bpvp; [B1 V5]; [B1 V5] # ڣ.്ϟ ++T; \u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ ++N; \u06A3.\u0D4D\u200Dϟ; [B1 V5]; [B1 V5] # ڣ.്ϟ ++T; \u200C𞸇𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 ++N; \u200C𞸇𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 ++T; \u200C𞸇𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 ++N; \u200C𞸇𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 ++T; \u200C\u062D𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 ++N; \u200C\u062D𑘿。\u0623𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 ++T; \u200C\u062D𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B2 B3] # ح𑘿.أ𐮂-腍 ++N; \u200C\u062D𑘿。\u0627\u0654𐮂-腍; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 ++B; xn--sgb4140l.xn----qmc5075grs9e; [B2 B3]; [B2 B3] # 
ح𑘿.أ𐮂-腍 ++B; xn--sgb953kmi8o.xn----qmc5075grs9e; [B1 B2 B3 C1]; [B1 B2 B3 C1] # ح𑘿.أ𐮂-腍 ++B; -򭷙\u066B纛。𝟛񭤇🄅; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -٫纛.3🄅 ++B; -򭷙\u066B纛。3񭤇4,; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -٫纛.34, ++B; xn----vqc8143g0tt4i.xn--34,-8787l; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -٫纛.34, ++B; xn----vqc8143g0tt4i.xn--3-os1sn476y; [B1 V3 V6]; [B1 V3 V6] # -٫纛.3🄅 ++B; 🔔.Ⴂ\u07CC\u0BCD𐋮; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 🔔.Ⴂߌ்𐋮 ++B; 🔔.Ⴂ\u07CC\u0BCD𐋮; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 🔔.Ⴂߌ்𐋮 ++B; 🔔.ⴂ\u07CC\u0BCD𐋮; [B1 B5]; [B1 B5] # 🔔.ⴂߌ்𐋮 ++B; xn--nv8h.xn--nsb46rvz1b222p; [B1 B5]; [B1 B5] # 🔔.ⴂߌ்𐋮 ++B; xn--nv8h.xn--nsb46r83e8112a; [B1 B5 V6]; [B1 B5 V6] # 🔔.Ⴂߌ்𐋮 ++B; 🔔.ⴂ\u07CC\u0BCD𐋮; [B1 B5]; [B1 B5] # 🔔.ⴂߌ்𐋮 ++B; 軥\u06B3.-𖬵; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 軥ڳ.-𖬵 ++B; xn--mkb5480e.xn----6u5m; [B1 B5 B6 V3]; [B1 B5 B6 V3] # 軥ڳ.-𖬵 ++B; 𐹤\u07CA\u06B6.𐨂-; [B1 V3 V5]; [B1 V3 V5] # 𐹤ߊڶ.𐨂- ++B; xn--pkb56cn614d.xn----974i; [B1 V3 V5]; [B1 V3 V5] # 𐹤ߊڶ.𐨂- ++B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 ++B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 ++B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 ++B; -󠅱0。\u17CF\u1DFD톇십; [V3 V5]; [V3 V5] # -0.៏᷽톇십 ++B; -0.xn--r4e872ah77nghm; [V3 V5]; [V3 V5] # -0.៏᷽톇십 ++B; ꡰ︒--。\u17CC靈𐹢񘳮; [B1 B6 P1 V2 V3 V5 V6]; [B1 B6 P1 V2 V3 V5 V6] # ꡰ︒--.៌靈𐹢 ++B; ꡰ。--。\u17CC靈𐹢񘳮; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ꡰ.--.៌靈𐹢 ++B; xn--md9a.--.xn--o4e6836dpxudz0v1c; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ꡰ.--.៌靈𐹢 ++B; xn-----bk9hu24z.xn--o4e6836dpxudz0v1c; [B1 B6 V2 V3 V5 V6]; [B1 B6 V2 V3 V5 V6] # ꡰ︒--.៌靈𐹢 ++B; \u115FႿႵრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ႿႵრ.୍ ++B; \u115FႿႵრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ႿႵრ.୍ ++B; \u115Fⴟⴕრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ⴟⴕრ.୍ ++B; \u115FႿⴕრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # Ⴟⴕრ.୍ ++B; xn--3nd0etsm92g.xn--9ic; [V5 V6]; [V5 V6] # Ⴟⴕრ.୍ ++B; xn--1od7wz74eeb.xn--9ic; [V5 V6]; [V5 V6] # ⴟⴕრ.୍ ++B; xn--tndt4hvw.xn--9ic; [V5 V6]; [V5 V6] # ႿႵრ.୍ ++B; \u115Fⴟⴕრ。\u0B4D; [P1 V5 V6]; [P1 V5 V6] # ⴟⴕრ.୍ ++B; \u115FႿⴕრ。\u0B4D; [P1 V5 V6]; 
[P1 V5 V6] # Ⴟⴕრ.୍ ++B; 🄃𐹠.\u0664󠅇; [B1 P1 V6]; [B1 P1 V6] # 🄃𐹠.٤ ++B; 2,𐹠.\u0664󠅇; [B1 P1 V6]; [B1 P1 V6] # 2,𐹠.٤ ++B; xn--2,-5g3o.xn--dib; [B1 P1 V6]; [B1 P1 V6] # 2,𐹠.٤ ++B; xn--7n0d1189a.xn--dib; [B1 V6]; [B1 V6] # 🄃𐹠.٤ ++T; 򻲼\u200C\uFC5B.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # ذٰ.ߒࡈ᯳ ++N; 򻲼\u200C\uFC5B.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # ذٰ.ߒࡈ᯳ ++T; 򻲼\u200C\u0630\u0670.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 P1 V6] # ذٰ.ߒࡈ᯳ ++N; 򻲼\u200C\u0630\u0670.\u07D2\u0848\u1BF3; [B2 B3 B5 B6 C1 P1 V6]; [B2 B3 B5 B6 C1 P1 V6] # ذٰ.ߒࡈ᯳ ++B; xn--vgb2kp1223g.xn--tsb0vz43c; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # ذٰ.ߒࡈ᯳ ++B; xn--vgb2kq00fl213y.xn--tsb0vz43c; [B2 B3 B5 B6 C1 V6]; [B2 B3 B5 B6 C1 V6] # ذٰ.ߒࡈ᯳ ++T; \u200D\u200D𞵪\u200C。ᡘ𑲭\u17B5; [B1 C1 C2 P1 V6]; [P1 V6] # .ᡘ𑲭 ++N; \u200D\u200D𞵪\u200C。ᡘ𑲭\u17B5; [B1 C1 C2 P1 V6]; [B1 C1 C2 P1 V6] # .ᡘ𑲭 ++B; xn--l96h.xn--03e93aq365d; [V6]; [V6] # .ᡘ𑲭 ++B; xn--0ugba05538b.xn--03e93aq365d; [B1 C1 C2 V6]; [B1 C1 C2 V6] # .ᡘ𑲭 ++B; 𞷻。⚄񗑇𑁿; [B1 P1 V6]; [B1 P1 V6] ++B; xn--qe7h.xn--c7h2966f7so4a; [B1 V6]; [B1 V6] ++B; \uA8C4≠.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ ++B; \uA8C4=\u0338.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ ++B; \uA8C4≠.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ ++B; \uA8C4=\u0338.𞠨\u0667; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ꣄≠.𞠨٧ ++B; xn--1chy504c.xn--gib1777v; [B1 V5 V6]; [B1 V5 V6] # ꣄≠.𞠨٧ ++B; 𝟛𝆪\uA8C4。\uA8EA-; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- ++B; 𝟛\uA8C4𝆪。\uA8EA-; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- ++B; 3\uA8C4𝆪。\uA8EA-; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- ++B; xn--3-sl4eu679e.xn----xn4e; [V3 V5]; [V3 V5] # 3꣄𝆪.꣪- ++B; \u075F\u1BA2\u103AႧ.4; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # ݟᮢ်Ⴇ.4 ++B; \u075F\u1BA2\u103Aⴇ.4; [B1 B2 B3]; [B1 B2 B3] # ݟᮢ်ⴇ.4 ++B; xn--jpb846bjzj7pr.4; [B1 B2 B3]; [B1 B2 B3] # ݟᮢ်ⴇ.4 ++B; xn--jpb846bmjw88a.4; [B1 B2 B3 V6]; [B1 B2 B3 V6] # ݟᮢ်Ⴇ.4 ++B; ᄹ。\u0ECA򠯤󠄞; [P1 V5 V6]; [P1 V5 V6] # ᄹ.໊ ++B; ᄹ。\u0ECA򠯤󠄞; [P1 V5 V6]; [P1 V5 V6] # ᄹ.໊ ++B; 
xn--lrd.xn--s8c05302k; [V5 V6]; [V5 V6] # ᄹ.໊ ++B; Ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] ++B; Ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] ++B; ⴆ򻢩.󠆡\uFE09𞤯; [P1 V6]; [P1 V6] ++B; xn--xkjw3965g.xn--ne6h; [V6]; [V6] ++B; xn--end82983m.xn--ne6h; [V6]; [V6] ++B; ⴆ򻢩.󠆡\uFE09𞤯; [P1 V6]; [P1 V6] ++B; ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] ++B; ⴆ򻢩.󠆡\uFE09𞤍; [P1 V6]; [P1 V6] ++T; ß\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ßࠋ︒ٻ.帼f∫∫ ++N; ß\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ßࠋ︒ٻ.帼f∫∫ ++T; ß\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6] # ßࠋ.ٻ.帼f∫∫ ++N; ß\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ßࠋ.ٻ.帼f∫∫ ++T; ß\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6] # ßࠋ.ٻ.帼f∫∫ ++N; ß\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ßࠋ.ٻ.帼f∫∫ ++T; SS\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ ++N; SS\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ ++T; ss\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ ++N; ss\u080B。\u067B.帼f∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ ++T; Ss\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ ++N; Ss\u080B。\u067B.帼F∫∫\u200C; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ ++B; xn--ss-uze.xn--0ib.xn--f-tcoa9162d; [B5 B6]; [B5 B6] # ssࠋ.ٻ.帼f∫∫ ++B; xn--ss-uze.xn--0ib.xn--f-sgn48ga6997e; [B5 B6 C1]; [B5 B6 C1] # ssࠋ.ٻ.帼f∫∫ ++B; xn--zca687a.xn--0ib.xn--f-sgn48ga6997e; [B5 B6 C1]; [B5 B6 C1] # ßࠋ.ٻ.帼f∫∫ ++T; ß\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ßࠋ︒ٻ.帼f∫∫ ++N; ß\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ßࠋ︒ٻ.帼f∫∫ ++T; SS\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ssࠋ︒ٻ.帼f∫∫ ++N; SS\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ssࠋ︒ٻ.帼f∫∫ ++T; ss\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ssࠋ︒ٻ.帼f∫∫ ++N; ss\u080B︒\u067B.帼f∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # ssࠋ︒ٻ.帼f∫∫ ++T; Ss\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # ssࠋ︒ٻ.帼f∫∫ ++N; Ss\u080B︒\u067B.帼F∬\u200C; [B5 B6 C1 P1 V6]; [B5 B6 C1 P1 V6] # 
ssࠋ︒ٻ.帼f∫∫ ++B; xn--ss-k0d31nu121d.xn--f-tcoa9162d; [B5 B6 V6]; [B5 B6 V6] # ssࠋ︒ٻ.帼f∫∫ ++B; xn--ss-k0d31nu121d.xn--f-sgn48ga6997e; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ssࠋ︒ٻ.帼f∫∫ ++B; xn--zca68zj8ac956c.xn--f-sgn48ga6997e; [B5 B6 C1 V6]; [B5 B6 C1 V6] # ßࠋ︒ٻ.帼f∫∫ ++T; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # .𐹴 ++N; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .𐹴 ++T; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; [B1 P1 V6] # .𐹴 ++N; 󘪗。𐹴𞨌\u200D; [B1 C2 P1 V6]; [B1 C2 P1 V6] # .𐹴 ++B; xn--8l83e.xn--so0dw168a; [B1 V6]; [B1 V6] ++B; xn--8l83e.xn--1ug4105gsxwf; [B1 C2 V6]; [B1 C2 V6] # .𐹴 ++B; 񗛨.򅟢𝟨\uA8C4; [P1 V6]; [P1 V6] # .6꣄ ++B; 񗛨.򅟢6\uA8C4; [P1 V6]; [P1 V6] # .6꣄ ++B; xn--mi60a.xn--6-sl4es8023c; [V6]; [V6] # .6꣄ ++B; \u1AB2\uFD8E。-۹ႱႨ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹ႱႨ ++B; \u1AB2\u0645\u062E\u062C。-۹ႱႨ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹ႱႨ ++B; \u1AB2\u0645\u062E\u062C。-۹ⴑⴈ; [B1 V3 V5]; [B1 V3 V5] # ᪲مخج.-۹ⴑⴈ ++B; \u1AB2\u0645\u062E\u062C。-۹Ⴑⴈ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹Ⴑⴈ ++B; xn--rgbd2e831i.xn----zyc875efr3a; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ᪲مخج.-۹Ⴑⴈ ++B; xn--rgbd2e831i.xn----zyc3430a9a; [B1 V3 V5]; [B1 V3 V5] # ᪲مخج.-۹ⴑⴈ ++B; xn--rgbd2e831i.xn----zyc155e9a; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ᪲مخج.-۹ႱႨ ++B; \u1AB2\uFD8E。-۹ⴑⴈ; [B1 V3 V5]; [B1 V3 V5] # ᪲مخج.-۹ⴑⴈ ++B; \u1AB2\uFD8E。-۹Ⴑⴈ; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᪲مخج.-۹Ⴑⴈ ++B; 𞤤.-\u08A3︒; [B1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤤.-ࢣ︒ ++B; 𞤤.-\u08A3。; [B1 V3]; [B1 V3] # 𞤤.-ࢣ. ++B; 𞤂.-\u08A3。; [B1 V3]; [B1 V3] # 𞤤.-ࢣ. ++B; xn--ce6h.xn----cod.; [B1 V3]; [B1 V3] # 𞤤.-ࢣ. 
++B; 𞤂.-\u08A3︒; [B1 P1 V3 V6]; [B1 P1 V3 V6] # 𞤤.-ࢣ︒ ++B; xn--ce6h.xn----cod7069p; [B1 V3 V6]; [B1 V3 V6] # 𞤤.-ࢣ︒ ++T; \u200C𐺨.\u0859--; [B1 C1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # .࡙-- ++N; \u200C𐺨.\u0859--; [B1 C1 P1 V3 V5 V6]; [B1 C1 P1 V3 V5 V6] # .࡙-- ++B; xn--9p0d.xn-----h6e; [B1 V3 V5 V6]; [B1 V3 V5 V6] # .࡙-- ++B; xn--0ug7905g.xn-----h6e; [B1 C1 V3 V5 V6]; [B1 C1 V3 V5 V6] # .࡙-- ++B; 𐋸󮘋Ⴢ.Ⴁ; [P1 V6]; [P1 V6] ++B; 𐋸󮘋ⴢ.ⴁ; [P1 V6]; [P1 V6] ++B; 𐋸󮘋Ⴢ.ⴁ; [P1 V6]; [P1 V6] ++B; xn--6nd5215jr2u0h.xn--skj; [V6]; [V6] ++B; xn--qlj1559dr224h.xn--skj; [V6]; [V6] ++B; xn--6nd5215jr2u0h.xn--8md; [V6]; [V6] ++T; 񗑿\uA806₄򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς ++N; 񗑿\uA806₄򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς ++T; 񗑿\uA8064򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς ++N; 񗑿\uA8064򩞆。𲩧󠒹ς; [P1 V6]; [P1 V6] # ꠆4.ς ++B; 񗑿\uA8064򩞆。𲩧󠒹Σ; [P1 V6]; [P1 V6] # ꠆4.σ ++B; 񗑿\uA8064򩞆。𲩧󠒹σ; [P1 V6]; [P1 V6] # ꠆4.σ ++B; xn--4-w93ej7463a9io5a.xn--4xa31142bk3f0d; [V6]; [V6] # ꠆4.σ ++B; xn--4-w93ej7463a9io5a.xn--3xa51142bk3f0d; [V6]; [V6] # ꠆4.ς ++B; 񗑿\uA806₄򩞆。𲩧󠒹Σ; [P1 V6]; [P1 V6] # ꠆4.σ ++B; 񗑿\uA806₄򩞆。𲩧󠒹σ; [P1 V6]; [P1 V6] # ꠆4.σ ++B; 󠆀\u0723。\u1DF4\u0775; [B1 V5]; [B1 V5] # ܣ.ᷴݵ ++B; xn--tnb.xn--5pb136i; [B1 V5]; [B1 V5] # ܣ.ᷴݵ ++T; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ ++N; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ ++T; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ ++N; 𐹱\u0842𝪨。𬼖Ⴑ\u200D; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ ++T; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1] # 𐹱ࡂ𝪨.𬼖ⴑ ++N; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1 B6 C2] # 𐹱ࡂ𝪨.𬼖ⴑ ++B; xn--0vb1535kdb6e.xn--8kjz186s; [B1]; [B1] # 𐹱ࡂ𝪨.𬼖ⴑ ++B; xn--0vb1535kdb6e.xn--1ug742c5714c; [B1 B6 C2]; [B1 B6 C2] # 𐹱ࡂ𝪨.𬼖ⴑ ++B; xn--0vb1535kdb6e.xn--pnd93707a; [B1 V6]; [B1 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ ++B; xn--0vb1535kdb6e.xn--pnd879eqy33c; [B1 B6 C2 V6]; [B1 B6 C2 V6] # 𐹱ࡂ𝪨.𬼖Ⴑ ++T; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1] # 𐹱ࡂ𝪨.𬼖ⴑ ++N; 𐹱\u0842𝪨。𬼖ⴑ\u200D; [B1 B6 C2]; [B1 B6 C2] # 𐹱ࡂ𝪨.𬼖ⴑ ++T; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᜔𐭪.-𐹴 
++N; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # ᜔𐭪.-𐹴 ++T; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ᜔𐭪.-𐹴 ++N; \u1714𐭪󠙘\u200D。-𐹴; [B1 C2 P1 V3 V5 V6]; [B1 C2 P1 V3 V5 V6] # ᜔𐭪.-𐹴 ++B; xn--fze4126jujt0g.xn----c36i; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ᜔𐭪.-𐹴 ++B; xn--fze807bso0spy14i.xn----c36i; [B1 C2 V3 V5 V6]; [B1 C2 V3 V5 V6] # ᜔𐭪.-𐹴 ++B; 𾢬。\u0729︒쯙𝟧; [B2 P1 V6]; [B2 P1 V6] # .ܩ︒쯙5 ++B; 𾢬。\u0729︒쯙𝟧; [B2 P1 V6]; [B2 P1 V6] # .ܩ︒쯙5 ++B; 𾢬。\u0729。쯙5; [P1 V6]; [P1 V6] # .ܩ.쯙5 ++B; 𾢬。\u0729。쯙5; [P1 V6]; [P1 V6] # .ܩ.쯙5 ++B; xn--t92s.xn--znb.xn--5-y88f; [V6]; [V6] # .ܩ.쯙5 ++B; xn--t92s.xn--5-p1c0712mm8rb; [B2 V6]; [B2 V6] # .ܩ︒쯙5 ++B; 𞤟-。\u0762≮뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 ++B; 𞤟-。\u0762<\u0338뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 ++B; 𞥁-。\u0762<\u0338뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 ++B; 𞥁-。\u0762≮뻐; [B2 B3 P1 V3 V6]; [B2 B3 P1 V3 V6] # 𞥁-.ݢ≮뻐 ++B; xn----1j8r.xn--mpb269krv4i; [B2 B3 V3 V6]; [B2 B3 V3 V6] # 𞥁-.ݢ≮뻐 ++B; 𞥩-򊫠.\u08B4≠; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ ++B; 𞥩-򊫠.\u08B4=\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ ++B; 𞥩-򊫠.\u08B4≠; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ ++B; 𞥩-򊫠.\u08B4=\u0338; [B2 B3 P1 V6]; [B2 B3 P1 V6] # -.ࢴ≠ ++B; xn----cm8rp3609a.xn--9yb852k; [B2 B3 V6]; [B2 B3 V6] # -.ࢴ≠ ++T; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ ++N; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ ++T; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ ++N; -񅂏ςႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςႼ.١ ++T; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ ++N; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ ++B; -񅂏ΣႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σႼ.١ ++B; -񅂏σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ ++B; -񅂏Σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ ++B; xn----0mb9682aov12f.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -σⴜ.١ ++B; xn----0mb770hun11i.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -σႼ.١ ++B; xn----ymb2782aov12f.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -ςⴜ.١ ++B; 
xn----ymb080hun11i.xn--9hb; [B1 V3 V6]; [B1 V3 V6] # -ςႼ.١ ++T; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ ++N; -񅂏ςⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ςⴜ.١ ++B; -񅂏ΣႼ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σႼ.١ ++B; -񅂏σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ ++B; -񅂏Σⴜ.\u0661; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -σⴜ.١ ++T; \u17CA.\u200D𝟮𑀿; [C2 V5]; [V5] # ៊.2𑀿 ++N; \u17CA.\u200D𝟮𑀿; [C2 V5]; [C2 V5] # ៊.2𑀿 ++T; \u17CA.\u200D2𑀿; [C2 V5]; [V5] # ៊.2𑀿 ++N; \u17CA.\u200D2𑀿; [C2 V5]; [C2 V5] # ៊.2𑀿 ++B; xn--m4e.xn--2-ku7i; [V5]; [V5] # ៊.2𑀿 ++B; xn--m4e.xn--2-tgnv469h; [C2 V5]; [C2 V5] # ៊.2𑀿 ++B; ≯𝟖。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 ++B; >\u0338𝟖。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 ++B; ≯8。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 ++B; >\u03388。\u1A60𐫓򟇑; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≯8.᩠𐫓 ++B; xn--8-ogo.xn--jof5303iv1z5d; [B1 V5 V6]; [B1 V5 V6] # ≯8.᩠𐫓 ++T; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑲫Ↄ٤. ++N; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑲫Ↄ٤. ++T; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # 𑲫Ↄ٤. ++N; 𑲫Ↄ\u0664。\u200C; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # 𑲫Ↄ٤. ++T; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 V5] # 𑲫ↄ٤. ++N; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 C1 V5] # 𑲫ↄ٤. ++B; xn--dib100l8x1p.; [B1 V5]; [B1 V5] # 𑲫ↄ٤. ++B; xn--dib100l8x1p.xn--0ug; [B1 C1 V5]; [B1 C1 V5] # 𑲫ↄ٤. ++B; xn--dib999kcy1p.; [B1 V5 V6]; [B1 V5 V6] # 𑲫Ↄ٤. ++B; xn--dib999kcy1p.xn--0ug; [B1 C1 V5 V6]; [B1 C1 V5 V6] # 𑲫Ↄ٤. ++T; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 V5] # 𑲫ↄ٤. ++N; 𑲫ↄ\u0664。\u200C; [B1 C1 V5]; [B1 C1 V5] # 𑲫ↄ٤. ++T; \u0C00𝟵\u200D\uFC9D.\u200D\u0750⒈; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ఀ9بح.ݐ⒈ ++N; \u0C00𝟵\u200D\uFC9D.\u200D\u0750⒈; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ఀ9بح.ݐ⒈ ++T; \u0C009\u200D\u0628\u062D.\u200D\u07501.; [B1 C2 V5]; [B1 V5] # ఀ9بح.ݐ1. ++N; \u0C009\u200D\u0628\u062D.\u200D\u07501.; [B1 C2 V5]; [B1 C2 V5] # ఀ9بح.ݐ1. ++B; xn--9-1mcp570d.xn--1-x3c.; [B1 V5]; [B1 V5] # ఀ9بح.ݐ1. 
++B; xn--9-1mcp570dl51a.xn--1-x3c211q.; [B1 C2 V5]; [B1 C2 V5] # ఀ9بح.ݐ1. ++B; xn--9-1mcp570d.xn--3ob470m; [B1 V5 V6]; [B1 V5 V6] # ఀ9بح.ݐ⒈ ++B; xn--9-1mcp570dl51a.xn--3ob977jmfd; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ఀ9بح.ݐ⒈ ++T; \uAAF6。嬶ß葽; [V5]; [V5] # ꫶.嬶ß葽 ++N; \uAAF6。嬶ß葽; [V5]; [V5] # ꫶.嬶ß葽 ++B; \uAAF6。嬶SS葽; [V5]; [V5] # ꫶.嬶ss葽 ++B; \uAAF6。嬶ss葽; [V5]; [V5] # ꫶.嬶ss葽 ++B; \uAAF6。嬶Ss葽; [V5]; [V5] # ꫶.嬶ss葽 ++B; xn--2v9a.xn--ss-q40dp97m; [V5]; [V5] # ꫶.嬶ss葽 ++B; xn--2v9a.xn--zca7637b14za; [V5]; [V5] # ꫶.嬶ß葽 ++B; 𑚶⒈。񞻡𐹺; [B5 B6 P1 V5 V6]; [B5 B6 P1 V5 V6] ++B; 𑚶1.。񞻡𐹺; [B5 B6 P1 V5 V6 A4_2]; [B5 B6 P1 V5 V6 A4_2] ++B; xn--1-3j0j..xn--yo0d5914s; [B5 B6 V5 V6 A4_2]; [B5 B6 V5 V6 A4_2] ++B; xn--tshz969f.xn--yo0d5914s; [B5 B6 V5 V6]; [B5 B6 V5 V6] ++B; 𑜤︒≮.񚕽\u05D8𞾩; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𑜤︒≮.ט ++B; 𑜤︒<\u0338.񚕽\u05D8𞾩; [B1 B5 B6 P1 V5 V6]; [B1 B5 B6 P1 V5 V6] # 𑜤︒≮.ט ++B; 𑜤。≮.񚕽\u05D8𞾩; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𑜤.≮.ט ++B; 𑜤。<\u0338.񚕽\u05D8𞾩; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𑜤.≮.ט ++B; xn--ci2d.xn--gdh.xn--deb0091w5q9u; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # 𑜤.≮.ט ++B; xn--gdh5267fdzpa.xn--deb0091w5q9u; [B1 B5 B6 V5 V6]; [B1 B5 B6 V5 V6] # 𑜤︒≮.ט ++T; 󠆋\u0603񏦤.⇁ς򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁ς ++N; 󠆋\u0603񏦤.⇁ς򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁ς ++B; 󠆋\u0603񏦤.⇁Σ򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁σ ++B; 󠆋\u0603񏦤.⇁σ򏋈򺇥; [B1 P1 V6]; [B1 P1 V6] # .⇁σ ++B; xn--lfb04106d.xn--4xa964mxv16m8moq; [B1 V6]; [B1 V6] # .⇁σ ++B; xn--lfb04106d.xn--3xa174mxv16m8moq; [B1 V6]; [B1 V6] # .⇁ς ++T; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # ς𑐽𑜫.𐫄 ++N; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # ς𑐽𑜫.𐫄 ++T; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # ς𑐽𑜫.𐫄 ++N; ς𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # ς𑐽𑜫.𐫄 ++T; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 ++N; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 ++T; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 ++N; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 ++B; xn--4xa2260lk3b8z15g.xn--tw9ct349a; [V6]; [V6] ++B; 
xn--4xa2260lk3b8z15g.xn--0ug4653g2xzf; [C1 V6]; [C1 V6] # σ𑐽𑜫.𐫄 ++B; xn--3xa4260lk3b8z15g.xn--0ug4653g2xzf; [C1 V6]; [C1 V6] # ς𑐽𑜫.𐫄 ++T; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 ++N; Σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 ++T; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [P1 V6] # σ𑐽𑜫.𐫄 ++N; σ𑐽𵢈𑜫。𞬩\u200C𐫄; [C1 P1 V6]; [C1 P1 V6] # σ𑐽𑜫.𐫄 ++B; -򵏽。-\uFC4C\u075B; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.-نحݛ ++B; -򵏽。-\u0646\u062D\u075B; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -.-نحݛ ++B; xn----o452j.xn----cnc8e38c; [B1 V3 V6]; [B1 V3 V6] # -.-نحݛ ++T; ⺢򇺅𝟤。\u200D🚷; [C2 P1 V6]; [P1 V6] # ⺢2.🚷 ++N; ⺢򇺅𝟤。\u200D🚷; [C2 P1 V6]; [C2 P1 V6] # ⺢2.🚷 ++T; ⺢򇺅2。\u200D🚷; [C2 P1 V6]; [P1 V6] # ⺢2.🚷 ++N; ⺢򇺅2。\u200D🚷; [C2 P1 V6]; [C2 P1 V6] # ⺢2.🚷 ++B; xn--2-4jtr4282f.xn--m78h; [V6]; [V6] ++B; xn--2-4jtr4282f.xn--1ugz946p; [C2 V6]; [C2 V6] # ⺢2.🚷 ++T; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # ⷾ𐹲. ++N; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # ⷾ𐹲. ++T; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # ⷾ𐹲. ++N; \u0CF8\u200D\u2DFE𐹲。򤐶; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # ⷾ𐹲. ++B; xn--hvc220of37m.xn--3e36c; [B5 B6 V6]; [B5 B6 V6] # ⷾ𐹲. ++B; xn--hvc488g69j402t.xn--3e36c; [B5 B6 C2 V6]; [B5 B6 C2 V6] # ⷾ𐹲. 
++B; 𐹢.Ⴍ₉⁸; [B1 P1 V6]; [B1 P1 V6] ++B; 𐹢.Ⴍ98; [B1 P1 V6]; [B1 P1 V6] ++B; 𐹢.ⴍ98; [B1]; [B1] ++B; xn--9n0d.xn--98-u61a; [B1]; [B1] ++B; xn--9n0d.xn--98-7ek; [B1 V6]; [B1 V6] ++B; 𐹢.ⴍ₉⁸; [B1]; [B1] ++T; \u200C\u034F。ß\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ß⒚≯ ++N; \u200C\u034F。ß\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ß⒚≯ ++T; \u200C\u034F。ß\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ß⒚≯ ++N; \u200C\u034F。ß\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ß⒚≯ ++T; \u200C\u034F。ß\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ß19.≯ ++N; \u200C\u034F。ß\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ß19.≯ ++T; \u200C\u034F。ß\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ß19.≯ ++N; \u200C\u034F。ß\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ß19.≯ ++T; \u200C\u034F。SS\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ ++N; \u200C\u034F。SS\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ ++T; \u200C\u034F。SS\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ ++N; \u200C\u034F。SS\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ ++T; \u200C\u034F。ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ ++N; \u200C\u034F。ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ ++T; \u200C\u034F。ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ ++N; \u200C\u034F。ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ ++T; \u200C\u034F。Ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ ++N; \u200C\u034F。Ss\u08E219.>\u0338; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ ++T; \u200C\u034F。Ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 P1 V6 A4_2] # .ss19.≯ ++N; \u200C\u034F。Ss\u08E219.≯; [B1 B5 C1 P1 V6]; [B1 B5 C1 P1 V6] # .ss19.≯ ++B; .xn--ss19-w0i.xn--hdh; [B1 B5 V6 A4_2]; [B1 B5 V6 A4_2] # .ss19.≯ ++B; xn--0ug.xn--ss19-w0i.xn--hdh; [B1 B5 C1 V6]; [B1 B5 C1 V6] # .ss19.≯ ++B; xn--0ug.xn--19-fia813f.xn--hdh; [B1 B5 C1 V6]; 
[B1 B5 C1 V6] # .ß19.≯ ++T; \u200C\u034F。SS\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ ++N; \u200C\u034F。SS\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ ++T; \u200C\u034F。SS\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ ++N; \u200C\u034F。SS\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ ++T; \u200C\u034F。ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ ++N; \u200C\u034F。ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ ++T; \u200C\u034F。ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ ++N; \u200C\u034F。ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ ++T; \u200C\u034F。Ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ ++N; \u200C\u034F。Ss\u08E2⒚>\u0338; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ ++T; \u200C\u034F。Ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6 A4_2] # .ss⒚≯ ++N; \u200C\u034F。Ss\u08E2⒚≯; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # .ss⒚≯ ++B; .xn--ss-9if872xjjc; [B5 B6 V6 A4_2]; [B5 B6 V6 A4_2] # .ss⒚≯ ++B; xn--0ug.xn--ss-9if872xjjc; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .ss⒚≯ ++B; xn--0ug.xn--zca612bx9vo5b; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # .ß⒚≯ ++T; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B2 B3 P1 V6] # ᡌ.𣃔 ++N; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ᡌ.𣃔 ++T; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B2 B3 P1 V6] # ᡌ.𣃔 ++N; \u200C𞥍ᡌ.𣃔; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ᡌ.𣃔 ++B; xn--c8e5919u.xn--od1j; [B2 B3 V6]; [B2 B3 V6] ++B; xn--c8e180bqz13b.xn--od1j; [B1 C1 V6]; [B1 C1 V6] # ᡌ.𣃔 ++B; \u07D0򜬝-񡢬。\u0FA0Ⴛ𞷏𝆬; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ߐ-.ྠႻ𝆬 ++B; \u07D0򜬝-񡢬。\u0FA0ⴛ𞷏𝆬; [B1 B2 B3 P1 V5 V6]; [B1 B2 B3 P1 V5 V6] # ߐ-.ྠⴛ𝆬 ++B; xn----8bd11730jefvw.xn--wfd802mpm20agsxa; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ߐ-.ྠⴛ𝆬 ++B; xn----8bd11730jefvw.xn--wfd08cd265hgsxa; [B1 B2 B3 V5 V6]; [B1 B2 B3 V5 V6] # ߐ-.ྠႻ𝆬 ++B; 𝨥。⫟𑈾; [V5]; [V5] ++B; xn--n82h.xn--63iw010f; [V5]; [V5] ++T; ⾛\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.Ⴕ𞠬 
++N; ⾛\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.Ⴕ𞠬 ++T; 走\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.Ⴕ𞠬 ++N; 走\u0753.Ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.Ⴕ𞠬 ++T; 走\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.ⴕ𞠬 ++N; 走\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.ⴕ𞠬 ++B; xn--6ob9779d.xn--mfb511rxu80a; [B5 B6 V6]; [B5 B6 V6] # 走ݓ.ⴕ𞠬 ++B; xn--6ob9779d.xn--mfb444k5gjt754b; [B5 B6 C2 V6]; [B5 B6 C2 V6] # 走ݓ.ⴕ𞠬 ++B; xn--6ob9779d.xn--mfb785ck569a; [B5 B6 V6]; [B5 B6 V6] # 走ݓ.Ⴕ𞠬 ++B; xn--6ob9779d.xn--mfb785czmm0y85b; [B5 B6 C2 V6]; [B5 B6 C2 V6] # 走ݓ.Ⴕ𞠬 ++T; ⾛\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 走ݓ.ⴕ𞠬 ++N; ⾛\u0753.ⴕ𞠬\u0604\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 走ݓ.ⴕ𞠬 ++T; -ᢗ\u200C🄄.𑜢; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # -ᢗ🄄.𑜢 ++N; -ᢗ\u200C🄄.𑜢; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -ᢗ🄄.𑜢 ++T; -ᢗ\u200C3,.𑜢; [C1 P1 V3 V5 V6]; [P1 V3 V5 V6] # -ᢗ3,.𑜢 ++N; -ᢗ\u200C3,.𑜢; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -ᢗ3,.𑜢 ++B; xn---3,-3eu.xn--9h2d; [P1 V3 V5 V6]; [P1 V3 V5 V6] ++B; xn---3,-3eu051c.xn--9h2d; [C1 P1 V3 V5 V6]; [C1 P1 V3 V5 V6] # -ᢗ3,.𑜢 ++B; xn----pck1820x.xn--9h2d; [V3 V5 V6]; [V3 V5 V6] ++B; xn----pck312bx563c.xn--9h2d; [C1 V3 V5 V6]; [C1 V3 V5 V6] # -ᢗ🄄.𑜢 ++T; ≠𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.Ⴚ ++N; ≠𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.Ⴚ ++T; =\u0338𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.Ⴚ ++N; =\u0338𐸁𹏁\u200C.Ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.Ⴚ ++T; =\u0338𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.ⴚ ++N; =\u0338𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.ⴚ ++T; ≠𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 P1 V6] # ≠.ⴚ ++N; ≠𐸁𹏁\u200C.ⴚ򳄠; [B1 C1 P1 V6]; [B1 C1 P1 V6] # ≠.ⴚ ++B; xn--1ch2293gv3nr.xn--ilj23531g; [B1 V6]; [B1 V6] ++B; xn--0ug83gn618a21ov.xn--ilj23531g; [B1 C1 V6]; [B1 C1 V6] # ≠.ⴚ ++B; xn--1ch2293gv3nr.xn--ynd49496l; [B1 V6]; [B1 V6] ++B; xn--0ug83gn618a21ov.xn--ynd49496l; [B1 C1 V6]; [B1 C1 V6] # ≠.Ⴚ ++B; \u0669。󠇀𑇊; [B1 B3 B6 V5]; 
[B1 B3 B6 V5] # ٩.𑇊 ++B; \u0669。󠇀𑇊; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٩.𑇊 ++B; xn--iib.xn--6d1d; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ٩.𑇊 ++B; \u1086𞶀≯⒍。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ႆ≯⒍.- ++B; \u1086𞶀>\u0338⒍。-; [B1 P1 V3 V5 V6]; [B1 P1 V3 V5 V6] # ႆ≯⒍.- ++B; \u1086𞶀≯6.。-; [B1 P1 V3 V5 V6 A4_2]; [B1 P1 V3 V5 V6 A4_2] # ႆ≯6..- ++B; \u1086𞶀>\u03386.。-; [B1 P1 V3 V5 V6 A4_2]; [B1 P1 V3 V5 V6 A4_2] # ႆ≯6..- ++B; xn--6-oyg968k7h74b..-; [B1 V3 V5 V6 A4_2]; [B1 V3 V5 V6 A4_2] # ႆ≯6..- ++B; xn--hmd482gqqb8730g.-; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ႆ≯⒍.- ++B; \u17B4.쮇-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # .쮇- ++B; \u17B4.쮇-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # .쮇- ++B; xn--z3e.xn----938f; [V3 V5 V6]; [V3 V5 V6] # .쮇- ++T; \u200C𑓂。⒈-􀪛; [C1 P1 V6]; [P1 V5 V6] # 𑓂.⒈- ++N; \u200C𑓂。⒈-􀪛; [C1 P1 V6]; [C1 P1 V6] # 𑓂.⒈- ++T; \u200C𑓂。1.-􀪛; [C1 P1 V3 V6]; [P1 V3 V5 V6] # 𑓂.1.- ++N; \u200C𑓂。1.-􀪛; [C1 P1 V3 V6]; [C1 P1 V3 V6] # 𑓂.1.- ++B; xn--wz1d.1.xn----rg03o; [V3 V5 V6]; [V3 V5 V6] ++B; xn--0ugy057g.1.xn----rg03o; [C1 V3 V6]; [C1 V3 V6] # 𑓂.1.- ++B; xn--wz1d.xn----dcp29674o; [V5 V6]; [V5 V6] ++B; xn--0ugy057g.xn----dcp29674o; [C1 V6]; [C1 V6] # 𑓂.⒈- ++T; ⒈\uFEAE\u200C。\u20E9🖞\u200C𖬴; [B1 C1 P1 V5 V6]; [B1 P1 V5 V6] # ⒈ر.⃩🖞𖬴 ++N; ⒈\uFEAE\u200C。\u20E9🖞\u200C𖬴; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ⒈ر.⃩🖞𖬴 ++T; 1.\u0631\u200C。\u20E9🖞\u200C𖬴; [B1 B3 C1 V5]; [B1 V5] # 1.ر.⃩🖞𖬴 ++N; 1.\u0631\u200C。\u20E9🖞\u200C𖬴; [B1 B3 C1 V5]; [B1 B3 C1 V5] # 1.ر.⃩🖞𖬴 ++B; 1.xn--wgb.xn--c1g6021kg18c; [B1 V5]; [B1 V5] # 1.ر.⃩🖞𖬴 ++B; 1.xn--wgb253k.xn--0ugz6a8040fty5d; [B1 B3 C1 V5]; [B1 B3 C1 V5] # 1.ر.⃩🖞𖬴 ++B; xn--wgb746m.xn--c1g6021kg18c; [B1 V5 V6]; [B1 V5 V6] # ⒈ر.⃩🖞𖬴 ++B; xn--wgb253kmfd.xn--0ugz6a8040fty5d; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ⒈ر.⃩🖞𖬴 ++B; 󌭇。𝟐\u1BA8\u07D4; [B1 P1 V6]; [B1 P1 V6] # .2ᮨߔ ++B; 󌭇。2\u1BA8\u07D4; [B1 P1 V6]; [B1 P1 V6] # .2ᮨߔ ++B; xn--xm89d.xn--2-icd143m; [B1 V6]; [B1 V6] # .2ᮨߔ ++T; \uFD8F򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.ς𐹷 ++N; \uFD8F򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 
V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.ς𐹷 ++T; \u0645\u062E\u0645򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.ς𐹷 ++N; \u0645\u062E\u0645򫳺.ς\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.ς𐹷 ++T; \u0645\u062E\u0645򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 ++N; \u0645\u062E\u0645򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 ++T; \u0645\u062E\u0645򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 ++N; \u0645\u062E\u0645򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 ++B; xn--tgb9bb64691z.xn--4xa6667k; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # مخم.σ𐹷 ++B; xn--tgb9bb64691z.xn--4xa895lrp7n; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # مخم.σ𐹷 ++B; xn--tgb9bb64691z.xn--3xa006lrp7n; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # مخم.ς𐹷 ++T; \uFD8F򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 ++N; \uFD8F򫳺.Σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 ++T; \uFD8F򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # مخم.σ𐹷 ++N; \uFD8F򫳺.σ\u200D𐹷; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # مخم.σ𐹷 ++B; ⒎\u06C1\u0605。\uAAF6۵𐇽; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ⒎ہ.꫶۵𐇽 ++B; 7.\u06C1\u0605。\uAAF6۵𐇽; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 7.ہ.꫶۵𐇽 ++B; 7.xn--nfb98a.xn--imb3805fxt8b; [B1 V5 V6]; [B1 V5 V6] # 7.ہ.꫶۵𐇽 ++B; xn--nfb98ai25e.xn--imb3805fxt8b; [B1 V5 V6]; [B1 V5 V6] # ⒎ہ.꫶۵𐇽 ++B; -ᡥ᠆󍲭。\u0605\u1A5D𐹡; [B1 P1 V3 V6]; [B1 P1 V3 V6] # -ᡥ᠆.ᩝ𐹡 ++B; xn----f3j6s87156i.xn--nfb035hoo2p; [B1 V3 V6]; [B1 V3 V6] # -ᡥ᠆.ᩝ𐹡 ++T; \u200D.\u06BD\u0663\u0596; [B1 C2]; [A4_2] # .ڽ٣֖ ++N; \u200D.\u06BD\u0663\u0596; [B1 C2]; [B1 C2] # .ڽ٣֖ ++B; .xn--hcb32bni; [A4_2]; [A4_2] # .ڽ٣֖ ++B; xn--1ug.xn--hcb32bni; [B1 C2]; [B1 C2] # .ڽ٣֖ ++B; xn--hcb32bni; \u06BD\u0663\u0596; xn--hcb32bni # ڽ٣֖ ++B; \u06BD\u0663\u0596; ; xn--hcb32bni # ڽ٣֖ ++T; 㒧۱.Ⴚ\u0678\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 㒧۱.Ⴚيٴ ++N; 㒧۱.Ⴚ\u0678\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 㒧۱.Ⴚيٴ ++T; 
㒧۱.Ⴚ\u064A\u0674\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # 㒧۱.Ⴚيٴ ++N; 㒧۱.Ⴚ\u064A\u0674\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # 㒧۱.Ⴚيٴ ++T; 㒧۱.ⴚ\u064A\u0674\u200D; [B5 B6 C2]; [B5 B6] # 㒧۱.ⴚيٴ ++N; 㒧۱.ⴚ\u064A\u0674\u200D; [B5 B6 C2]; [B5 B6 C2] # 㒧۱.ⴚيٴ ++B; xn--emb715u.xn--mhb8fy26k; [B5 B6]; [B5 B6] # 㒧۱.ⴚيٴ ++B; xn--emb715u.xn--mhb8f960g03l; [B5 B6 C2]; [B5 B6 C2] # 㒧۱.ⴚيٴ ++B; xn--emb715u.xn--mhb8f817a; [B5 B6 V6]; [B5 B6 V6] # 㒧۱.Ⴚيٴ ++B; xn--emb715u.xn--mhb8f817ao2p; [B5 B6 C2 V6]; [B5 B6 C2 V6] # 㒧۱.Ⴚيٴ ++T; 㒧۱.ⴚ\u0678\u200D; [B5 B6 C2]; [B5 B6] # 㒧۱.ⴚيٴ ++N; 㒧۱.ⴚ\u0678\u200D; [B5 B6 C2]; [B5 B6 C2] # 㒧۱.ⴚيٴ ++B; \u0F94ꡋ-.-𖬴; [V3 V5]; [V3 V5] # ྔꡋ-.-𖬴 ++B; \u0F94ꡋ-.-𖬴; [V3 V5]; [V3 V5] # ྔꡋ-.-𖬴 ++B; xn----ukg9938i.xn----4u5m; [V3 V5]; [V3 V5] # ྔꡋ-.-𖬴 ++T; 񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- ++N; 񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- ++T; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- ++N; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- ++T; 񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- ++N; 񿒳-⋢\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- ++T; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [P1 V3 V6] # -⋢.标- ++N; 񿒳-⊑\u0338\u200C.标-; [C1 P1 V3 V6]; [C1 P1 V3 V6] # -⋢.标- ++B; xn----9mo67451g.xn----qj7b; [V3 V6]; [V3 V6] ++B; xn----sgn90kn5663a.xn----qj7b; [C1 V3 V6]; [C1 V3 V6] # -⋢.标- ++T; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ ++N; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ ++T; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ ++N; \u0671.ς\u07DC; [B5 B6]; [B5 B6] # ٱ.ςߜ ++B; \u0671.Σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ ++B; \u0671.σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ ++B; xn--qib.xn--4xa21s; [B5 B6]; [B5 B6] # ٱ.σߜ ++B; xn--qib.xn--3xa41s; [B5 B6]; [B5 B6] # ٱ.ςߜ ++B; \u0671.Σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ ++B; \u0671.σ\u07DC; [B5 B6]; [B5 B6] # ٱ.σߜ ++T; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # .𑑂 ++N; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # .𑑂 ++T; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 
B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 P1 V6] # .𑑂 ++N; 񼈶\u0605.\u08C1\u200D𑑂𱼱; [B2 B3 B5 B6 C2 P1 V6]; [B2 B3 B5 B6 C2 P1 V6] # .𑑂 ++B; xn--nfb17942h.xn--nzb6708kx3pn; [B2 B3 B5 B6 V6]; [B2 B3 B5 B6 V6] # .𑑂 ++B; xn--nfb17942h.xn--nzb240jv06otevq; [B2 B3 B5 B6 C2 V6]; [B2 B3 B5 B6 C2 V6] # .𑑂 ++B; 𐹾𐋩𞵜。\u1BF2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹾𐋩.᯲ ++B; 𐹾𐋩𞵜。\u1BF2; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𐹾𐋩.᯲ ++B; xn--d97cn8rn44p.xn--0zf; [B1 V5 V6]; [B1 V5 V6] # 𐹾𐋩.᯲ ++T; 6\u1160\u1C33󠸧.򟜊锰\u072Cς; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬς ++N; 6\u1160\u1C33󠸧.򟜊锰\u072Cς; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬς ++B; 6\u1160\u1C33󠸧.򟜊锰\u072CΣ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬσ ++B; 6\u1160\u1C33󠸧.򟜊锰\u072Cσ; [B1 B5 P1 V6]; [B1 B5 P1 V6] # 6ᰳ.锰ܬσ ++B; xn--6-5bh476ewr517a.xn--4xa95ohw6pk078g; [B1 B5 V6]; [B1 B5 V6] # 6ᰳ.锰ܬσ ++B; xn--6-5bh476ewr517a.xn--3xa16ohw6pk078g; [B1 B5 V6]; [B1 B5 V6] # 6ᰳ.锰ܬς ++B; \u06B3\uFE04񅎦𝟽。𐹽; [B1 B2 P1 V6]; [B1 B2 P1 V6] # ڳ7.𐹽 ++B; \u06B3\uFE04񅎦7。𐹽; [B1 B2 P1 V6]; [B1 B2 P1 V6] # ڳ7.𐹽 ++B; xn--7-yuc34665f.xn--1o0d; [B1 B2 V6]; [B1 B2 V6] # ڳ7.𐹽 ++T; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 P1 V6] # .⫞ ++N; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .⫞ ++T; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 P1 V6] # .⫞ ++N; 𞮧.\u200C⫞; [B1 C1 P1 V6]; [B1 C1 P1 V6] # .⫞ ++B; xn--pw6h.xn--53i; [B1 V6]; [B1 V6] ++B; xn--pw6h.xn--0ug283b; [B1 C1 V6]; [B1 C1 V6] # .⫞ ++B; -񕉴.\u06E0ᢚ-; [P1 V3 V5 V6]; [P1 V3 V5 V6] # -.۠ᢚ- ++B; xn----qi38c.xn----jxc827k; [V3 V5 V6]; [V3 V5 V6] # -.۠ᢚ- ++T; ⌁\u200D𑄴.\u200C𝟩\u066C; [B1 C1 C2]; [B1] # ⌁𑄴.7٬ ++N; ⌁\u200D𑄴.\u200C𝟩\u066C; [B1 C1 C2]; [B1 C1 C2] # ⌁𑄴.7٬ ++T; ⌁\u200D𑄴.\u200C7\u066C; [B1 C1 C2]; [B1] # ⌁𑄴.7٬ ++N; ⌁\u200D𑄴.\u200C7\u066C; [B1 C1 C2]; [B1 C1 C2] # ⌁𑄴.7٬ ++B; xn--nhh5394g.xn--7-xqc; [B1]; [B1] # ⌁𑄴.7٬ ++B; xn--1ug38i2093a.xn--7-xqc297q; [B1 C1 C2]; [B1 C1 C2] # ⌁𑄴.7٬ ++B; ︒\uFD05\u0E37\uFEFC。岓\u1BF2󠾃ᡂ; [B1 P1 V6]; [B1 P1 V6] # ︒صىืلا.岓᯲ᡂ ++B; 。\u0635\u0649\u0E37\u0644\u0627。岓\u1BF2󠾃ᡂ; [P1 V6 A4_2]; [P1 V6 A4_2] # .صىืلا.岓᯲ᡂ ++B; 
.xn--mgb1a7bt462h.xn--17e10qe61f9r71s; [V6 A4_2]; [V6 A4_2] # .صىืلا.岓᯲ᡂ ++B; xn--mgb1a7bt462hf267a.xn--17e10qe61f9r71s; [B1 V6]; [B1 V6] # ︒صىืلا.岓᯲ᡂ ++B; 𐹨。8𑁆; [B1]; [B1] ++B; xn--go0d.xn--8-yu7i; [B1]; [B1] ++B; 𞀕\u0D43.ꡚ\u08FA𐹰\u0D44; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𞀕ൃ.ꡚࣺ𐹰ൄ ++B; 𞀕\u0D43.ꡚ\u08FA𐹰\u0D44; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𞀕ൃ.ꡚࣺ𐹰ൄ ++B; xn--mxc5210v.xn--90b01t8u2p1ltd; [B1 B3 B5 B6 V5]; [B1 B3 B5 B6 V5] # 𞀕ൃ.ꡚࣺ𐹰ൄ ++B; 󆩏𐦹\u0303。󠍅; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ̃. ++B; 󆩏𐦹\u0303。󠍅; [B1 B5 B6 P1 V6]; [B1 B5 B6 P1 V6] # ̃. ++B; xn--nsa1265kp9z9e.xn--xt36e; [B1 B5 B6 V6]; [B1 B5 B6 V6] # ̃. ++B; ᢌ.-\u085A; [V3]; [V3] # ᢌ.-࡚ ++B; ᢌ.-\u085A; [V3]; [V3] # ᢌ.-࡚ ++B; xn--59e.xn----5jd; [V3]; [V3] # ᢌ.-࡚ ++B; 𥛛𑘶。𐹬𐲸\u0BCD; [B1 P1 V6]; [B1 P1 V6] # 𥛛𑘶.𐹬் ++B; 𥛛𑘶。𐹬𐲸\u0BCD; [B1 P1 V6]; [B1 P1 V6] # 𥛛𑘶.𐹬் ++B; xn--jb2dj685c.xn--xmc5562kmcb; [B1 V6]; [B1 V6] # 𥛛𑘶.𐹬் ++T; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # Ⴐݿ. ++N; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴐݿ. ++T; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B5 B6 P1 V6] # Ⴐݿ. ++N; Ⴐ\u077F.\u200C; [B1 B5 B6 C1 P1 V6]; [B1 B5 B6 C1 P1 V6] # Ⴐݿ. ++T; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B5 B6] # ⴐݿ. ++N; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B1 B5 B6 C1] # ⴐݿ. ++B; xn--gqb743q.; [B5 B6]; [B5 B6] # ⴐݿ. ++B; xn--gqb743q.xn--0ug; [B1 B5 B6 C1]; [B1 B5 B6 C1] # ⴐݿ. ++B; xn--gqb918b.; [B5 B6 V6]; [B5 B6 V6] # Ⴐݿ. ++B; xn--gqb918b.xn--0ug; [B1 B5 B6 C1 V6]; [B1 B5 B6 C1 V6] # Ⴐݿ. ++T; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B5 B6] # ⴐݿ. ++N; ⴐ\u077F.\u200C; [B1 B5 B6 C1]; [B1 B5 B6 C1] # ⴐݿ. 
++T; 🄅𑲞-⒈。\u200Dᠩ\u06A5; [B1 C2 P1 V6]; [B1 B5 B6 P1 V6] # 🄅𑲞-⒈.ᠩڥ ++N; 🄅𑲞-⒈。\u200Dᠩ\u06A5; [B1 C2 P1 V6]; [B1 C2 P1 V6] # 🄅𑲞-⒈.ᠩڥ ++T; 4,𑲞-1.。\u200Dᠩ\u06A5; [B1 C2 P1 V6 A4_2]; [B1 B5 B6 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ ++N; 4,𑲞-1.。\u200Dᠩ\u06A5; [B1 C2 P1 V6 A4_2]; [B1 C2 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ ++B; xn--4,-1-w401a..xn--7jb180g; [B1 B5 B6 P1 V6 A4_2]; [B1 B5 B6 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ ++B; xn--4,-1-w401a..xn--7jb180gexf; [B1 C2 P1 V6 A4_2]; [B1 C2 P1 V6 A4_2] # 4,𑲞-1..ᠩڥ ++B; xn----ecp8796hjtvg.xn--7jb180g; [B1 B5 B6 V6]; [B1 B5 B6 V6] # 🄅𑲞-⒈.ᠩڥ ++B; xn----ecp8796hjtvg.xn--7jb180gexf; [B1 C2 V6]; [B1 C2 V6] # 🄅𑲞-⒈.ᠩڥ ++B; 񗀤。𞤪򮿋; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; 񗀤。𞤈򮿋; [B2 B3 P1 V6]; [B2 B3 P1 V6] ++B; xn--4240a.xn--ie6h83808a; [B2 B3 V6]; [B2 B3 V6] ++B; \u05C1۲。𐮊\u066C𝨊鄨; [B1 B2 B3 V5]; [B1 B2 B3 V5] # ׁ۲.𐮊٬𝨊鄨 ++B; \u05C1۲。𐮊\u066C𝨊鄨; [B1 B2 B3 V5]; [B1 B2 B3 V5] # ׁ۲.𐮊٬𝨊鄨 ++B; xn--pdb42d.xn--lib6412enztdwv6h; [B1 B2 B3 V5]; [B1 B2 B3 V5] # ׁ۲.𐮊٬𝨊鄨 ++B; 𞭳-ꡁ。\u1A69\u0BCD-; [B1 B2 B3 P1 V3 V5 V6]; [B1 B2 B3 P1 V3 V5 V6] # -ꡁ.ᩩ்- ++B; xn----be4e4276f.xn----lze333i; [B1 B2 B3 V3 V5 V6]; [B1 B2 B3 V3 V5 V6] # -ꡁ.ᩩ்- ++T; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß ++N; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß ++T; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß ++N; \u1039-𚮭🞢.ß; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ß ++B; \u1039-𚮭🞢.SS; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss ++B; \u1039-𚮭🞢.ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss ++B; \u1039-𚮭🞢.Ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss ++B; xn----9tg11172akr8b.ss; [V5 V6]; [V5 V6] # ္-🞢.ss ++B; xn----9tg11172akr8b.xn--zca; [V5 V6]; [V5 V6] # ္-🞢.ß ++B; \u1039-𚮭🞢.SS; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss ++B; \u1039-𚮭🞢.ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss ++B; \u1039-𚮭🞢.Ss; [P1 V5 V6]; [P1 V5 V6] # ္-🞢.ss ++T; \uFCF2-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 P1 V3 V6] # ـَّ-.Ⴟ␣ ++N; \uFCF2-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 C1 P1 V6] # ـَّ-.Ⴟ␣ ++T; \u0640\u064E\u0651-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 P1 V3 V6] # ـَّ-.Ⴟ␣ ++N; 
\u0640\u064E\u0651-\u200C。Ⴟ\u200C␣; [B3 B6 C1 P1 V6]; [B3 B6 C1 P1 V6] # ـَّ-.Ⴟ␣ ++T; \u0640\u064E\u0651-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 V3] # ـَّ-.ⴟ␣ ++N; \u0640\u064E\u0651-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 C1] # ـَّ-.ⴟ␣ ++B; xn----eoc6bm.xn--xph904a; [B3 B6 V3]; [B3 B6 V3] # ـَّ-.ⴟ␣ ++B; xn----eoc6bm0504a.xn--0ug13nd0j; [B3 B6 C1]; [B3 B6 C1] # ـَّ-.ⴟ␣ ++B; xn----eoc6bm.xn--3nd240h; [B3 B6 V3 V6]; [B3 B6 V3 V6] # ـَّ-.Ⴟ␣ ++B; xn----eoc6bm0504a.xn--3nd849e05c; [B3 B6 C1 V6]; [B3 B6 C1 V6] # ـَّ-.Ⴟ␣ ++T; \uFCF2-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 V3] # ـَّ-.ⴟ␣ ++N; \uFCF2-\u200C。ⴟ\u200C␣; [B3 B6 C1]; [B3 B6 C1] # ـَّ-.ⴟ␣ ++T; \u0D4D-\u200D\u200C。񥞧₅≠; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ ++N; \u0D4D-\u200D\u200C。񥞧₅≠; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ ++T; \u0D4D-\u200D\u200C。񥞧₅=\u0338; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ ++N; \u0D4D-\u200D\u200C。񥞧₅=\u0338; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ ++T; \u0D4D-\u200D\u200C。񥞧5≠; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ ++N; \u0D4D-\u200D\u200C。񥞧5≠; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ ++T; \u0D4D-\u200D\u200C。񥞧5=\u0338; [C1 C2 P1 V5 V6]; [P1 V3 V5 V6] # ്-.5≠ ++N; \u0D4D-\u200D\u200C。񥞧5=\u0338; [C1 C2 P1 V5 V6]; [C1 C2 P1 V5 V6] # ്-.5≠ ++B; xn----jmf.xn--5-ufo50192e; [V3 V5 V6]; [V3 V5 V6] # ്-.5≠ ++B; xn----jmf215lda.xn--5-ufo50192e; [C1 C2 V5 V6]; [C1 C2 V5 V6] # ്-.5≠ ++B; 锣。\u0A4D󠘻󠚆; [P1 V5 V6]; [P1 V5 V6] # 锣.੍ ++B; xn--gc5a.xn--ybc83044ppga; [V5 V6]; [V5 V6] # 锣.੍ ++T; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ ++N; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; [B3 C2] # ؽ𑈾.ى꤫ ++T; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ ++N; \u063D𑈾.\u0649\u200D\uA92B; [B3 C2]; [B3 C2] # ؽ𑈾.ى꤫ ++B; xn--8gb2338k.xn--lhb0154f; \u063D𑈾.\u0649\uA92B; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ ++B; \u063D𑈾.\u0649\uA92B; ; xn--8gb2338k.xn--lhb0154f # ؽ𑈾.ى꤫ ++B; xn--8gb2338k.xn--lhb603k060h; [B3 C2]; [B3 C2] # ؽ𑈾.ى꤫ ++T; \u0666⁴Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 
P1 V6] # ٦4Ⴅ.ࢽ ++N; \u0666⁴Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # ٦4Ⴅ.ࢽ ++T; \u06664Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 P1 V6] # ٦4Ⴅ.ࢽ ++N; \u06664Ⴅ.\u08BD\u200C; [B1 B3 C1 P1 V6]; [B1 B3 C1 P1 V6] # ٦4Ⴅ.ࢽ ++T; \u06664ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1] # ٦4ⴅ.ࢽ ++N; \u06664ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1 B3 C1] # ٦4ⴅ.ࢽ ++B; xn--4-kqc6770a.xn--jzb; [B1]; [B1] # ٦4ⴅ.ࢽ ++B; xn--4-kqc6770a.xn--jzb840j; [B1 B3 C1]; [B1 B3 C1] # ٦4ⴅ.ࢽ ++B; xn--4-kqc489e.xn--jzb; [B1 V6]; [B1 V6] # ٦4Ⴅ.ࢽ ++B; xn--4-kqc489e.xn--jzb840j; [B1 B3 C1 V6]; [B1 B3 C1 V6] # ٦4Ⴅ.ࢽ ++T; \u0666⁴ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1] # ٦4ⴅ.ࢽ ++N; \u0666⁴ⴅ.\u08BD\u200C; [B1 B3 C1]; [B1 B3 C1] # ٦4ⴅ.ࢽ ++T; ჁႱ6\u0318。ß\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ßᬃ ++N; ჁႱ6\u0318。ß\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ßᬃ ++T; ⴡⴑ6\u0318。ß\u1B03; ⴡⴑ6\u0318.ß\u1B03; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ßᬃ ++N; ⴡⴑ6\u0318。ß\u1B03; ⴡⴑ6\u0318.ß\u1B03; xn--6-8cb7433a2ba.xn--zca894k # ⴡⴑ6̘.ßᬃ ++B; ჁႱ6\u0318。SS\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ssᬃ ++B; ⴡⴑ6\u0318。ss\u1B03; ⴡⴑ6\u0318.ss\u1B03; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ssᬃ ++B; Ⴡⴑ6\u0318。Ss\u1B03; [P1 V6]; [P1 V6] # Ⴡⴑ6̘.ssᬃ ++B; xn--6-8cb306hms1a.xn--ss-2vq; [V6]; [V6] # Ⴡⴑ6̘.ssᬃ ++B; xn--6-8cb7433a2ba.xn--ss-2vq; ⴡⴑ6\u0318.ss\u1B03; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ssᬃ ++B; ⴡⴑ6\u0318.ss\u1B03; ; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ssᬃ ++B; ჁႱ6\u0318.SS\u1B03; [P1 V6]; [P1 V6] # ჁႱ6̘.ssᬃ ++B; Ⴡⴑ6\u0318.Ss\u1B03; [P1 V6]; [P1 V6] # Ⴡⴑ6̘.ssᬃ ++B; xn--6-8cb555h2b.xn--ss-2vq; [V6]; [V6] # ჁႱ6̘.ssᬃ ++B; xn--6-8cb7433a2ba.xn--zca894k; ⴡⴑ6\u0318.ß\u1B03; xn--6-8cb7433a2ba.xn--zca894k # ⴡⴑ6̘.ßᬃ ++T; ⴡⴑ6\u0318.ß\u1B03; ; xn--6-8cb7433a2ba.xn--ss-2vq # ⴡⴑ6̘.ßᬃ ++N; ⴡⴑ6\u0318.ß\u1B03; ; xn--6-8cb7433a2ba.xn--zca894k # ⴡⴑ6̘.ßᬃ ++B; xn--6-8cb555h2b.xn--zca894k; [V6]; [V6] # ჁႱ6̘.ßᬃ ++B; 򋡐。≯𑋪; [P1 V6]; [P1 V6] ++B; 򋡐。>\u0338𑋪; [P1 V6]; [P1 V6] ++B; 򋡐。≯𑋪; [P1 V6]; [P1 V6] ++B; 򋡐。>\u0338𑋪; [P1 V6]; [P1 V6] ++B; xn--eo08b.xn--hdh3385g; [V6]; [V6] ++T; \u065A۲。\u200C-\u1BF3\u08E2; 
[B1 C1 P1 V5 V6]; [B1 P1 V3 V5 V6] # ٚ۲.-᯳ ++N; \u065A۲。\u200C-\u1BF3\u08E2; [B1 C1 P1 V5 V6]; [B1 C1 P1 V5 V6] # ٚ۲.-᯳ ++B; xn--2hb81a.xn----xrd657l; [B1 V3 V5 V6]; [B1 V3 V5 V6] # ٚ۲.-᯳ ++B; xn--2hb81a.xn----xrd657l30d; [B1 C1 V5 V6]; [B1 C1 V5 V6] # ٚ۲.-᯳ ++B; 󠄏𖬴󠲽。\uFFA0; [P1 V5 V6]; [P1 V5 V6] # 𖬴. ++B; 󠄏𖬴󠲽。\u1160; [P1 V5 V6]; [P1 V5 V6] # 𖬴. ++B; xn--619ep9154c.xn--psd; [V5 V6]; [V5 V6] # 𖬴. ++B; xn--619ep9154c.xn--cl7c; [V5 V6]; [V5 V6] # 𖬴. ++T; ß⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ß⒈ݠ. ++N; ß⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ß⒈ݠ. ++T; ß1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ß1.ݠ. ++N; ß1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ß1.ݠ. ++B; SS1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ss1.ݠ. ++B; ss1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ss1.ݠ. ++B; Ss1.\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B2 B3 B5 P1 V6]; [B2 B3 B5 P1 V6] # ss1.ݠ. ++B; ss1.xn--kpb6677h.xn--nfb09923ifkyyb; [B2 B3 B5 V6]; [B2 B3 B5 V6] # ss1.ݠ. ++B; xn--1-pfa.xn--kpb6677h.xn--nfb09923ifkyyb; [B2 B3 B5 V6]; [B2 B3 B5 V6] # ß1.ݠ. ++B; SS⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ss⒈ݠ. ++B; ss⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ss⒈ݠ. ++B; Ss⒈\u0760\uD7AE.􉖲󠅄\u0605򉔯; [B5 P1 V6]; [B5 P1 V6] # ss⒈ݠ. ++B; xn--ss-6ke9690a0g1q.xn--nfb09923ifkyyb; [B5 V6]; [B5 V6] # ss⒈ݠ. ++B; xn--zca444a0s1ao12n.xn--nfb09923ifkyyb; [B5 V6]; [B5 V6] # ß⒈ݠ. 
++B; 󠭔.𐋱₂; [P1 V6]; [P1 V6] ++B; 󠭔.𐋱2; [P1 V6]; [P1 V6] ++B; xn--vi56e.xn--2-w91i; [V6]; [V6] ++T; \u0716\u0947。-ß\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ßڥ ++N; \u0716\u0947。-ß\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ßڥ ++T; \u0716\u0947。-SS\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ssڥ ++N; \u0716\u0947。-SS\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ ++T; \u0716\u0947。-ss\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ssڥ ++N; \u0716\u0947。-ss\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ ++T; \u0716\u0947。-Ss\u06A5\u200C; [B1 C1 V3]; [B1 V3] # ܖे.-ssڥ ++N; \u0716\u0947。-Ss\u06A5\u200C; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ ++B; xn--gnb63i.xn---ss-4ef; [B1 V3]; [B1 V3] # ܖे.-ssڥ ++B; xn--gnb63i.xn---ss-4ef9263a; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ssڥ ++B; xn--gnb63i.xn----qfa845bhx4a; [B1 C1 V3]; [B1 C1 V3] # ܖे.-ßڥ ++T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉Ⴡ ++N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉Ⴡ ++T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉Ⴡ ++N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉Ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉Ⴡ ++T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉ⴡ ++N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉ⴡ ++B; xn--pgb911izv33i.xn--i6f270etuy; [B1 V5 V6]; [B1 V5 V6] # ᮩت.᳕䷉ⴡ ++B; xn--pgb911imgdrw34r.xn--i6f270etuy; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ᮩت.᳕䷉ⴡ ++B; xn--pgb911izv33i.xn--5nd792dgv3b; [B1 V5 V6]; [B1 V5 V6] # ᮩت.᳕䷉Ⴡ ++B; xn--pgb911imgdrw34r.xn--5nd792dgv3b; [B1 C2 V5 V6]; [B1 C2 V5 V6] # ᮩت.᳕䷉Ⴡ ++T; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 P1 V5 V6] # ᮩت.᳕䷉ⴡ ++N; \u1BA9\u200D\u062A񡚈.\u1CD5䷉ⴡ; [B1 C2 P1 V5 V6]; [B1 C2 P1 V5 V6] # ᮩت.᳕䷉ⴡ ++T; \u2DBF.ß\u200D; [C2 P1 V6]; [P1 V6] # .ß ++N; \u2DBF.ß\u200D; [C2 P1 V6]; [C2 P1 V6] # .ß ++T; \u2DBF.SS\u200D; [C2 P1 V6]; [P1 V6] # .ss ++N; \u2DBF.SS\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss ++T; \u2DBF.ss\u200D; [C2 P1 V6]; [P1 V6] # .ss ++N; \u2DBF.ss\u200D; [C2 
P1 V6]; [C2 P1 V6] # .ss ++T; \u2DBF.Ss\u200D; [C2 P1 V6]; [P1 V6] # .ss ++N; \u2DBF.Ss\u200D; [C2 P1 V6]; [C2 P1 V6] # .ss ++B; xn--7pj.ss; [V6]; [V6] # .ss ++B; xn--7pj.xn--ss-n1t; [C2 V6]; [C2 V6] # .ss ++B; xn--7pj.xn--zca870n; [C2 V6]; [C2 V6] # .ß ++B; \u1BF3︒.\u062A≯ꡂ; [B2 B3 B6 P1 V5 V6]; [B2 B3 B6 P1 V5 V6] # ᯳︒.ت≯ꡂ ++B; \u1BF3︒.\u062A>\u0338ꡂ; [B2 B3 B6 P1 V5 V6]; [B2 B3 B6 P1 V5 V6] # ᯳︒.ت≯ꡂ ++B; \u1BF3。.\u062A≯ꡂ; [B2 B3 P1 V5 V6 A4_2]; [B2 B3 P1 V5 V6 A4_2] # ᯳..ت≯ꡂ ++B; \u1BF3。.\u062A>\u0338ꡂ; [B2 B3 P1 V5 V6 A4_2]; [B2 B3 P1 V5 V6 A4_2] # ᯳..ت≯ꡂ ++B; xn--1zf..xn--pgb885lry5g; [B2 B3 V5 V6 A4_2]; [B2 B3 V5 V6 A4_2] # ᯳..ت≯ꡂ ++B; xn--1zf8957g.xn--pgb885lry5g; [B2 B3 B6 V5 V6]; [B2 B3 B6 V5 V6] # ᯳︒.ت≯ꡂ ++B; ≮≠񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 ++B; <\u0338=\u0338񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 ++B; ≮≠񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 ++B; <\u0338=\u0338񏻃。-𫠆\u06B7𐹪; [B1 P1 V3 V6]; [B1 P1 V3 V6] # ≮≠.-𫠆ڷ𐹪 ++B; xn--1ch1a29470f.xn----7uc5363rc1rn; [B1 V3 V6]; [B1 V3 V6] # ≮≠.-𫠆ڷ𐹪 ++B; 𐹡\u0777。ꡂ; [B1]; [B1] # 𐹡ݷ.ꡂ ++B; xn--7pb5275k.xn--bc9a; [B1]; [B1] # 𐹡ݷ.ꡂ ++T; Ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ß𐧦𐹳ݵ ++N; Ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ß𐧦𐹳ݵ ++T; ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴉؙ𝆅.ß𐧦𐹳ݵ ++N; ⴉ𝆅񔻅\u0619.ß𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴉؙ𝆅.ß𐧦𐹳ݵ ++B; Ⴉ𝆅񔻅\u0619.SS𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ss𐧦𐹳ݵ ++B; ⴉ𝆅񔻅\u0619.ss𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # ⴉؙ𝆅.ss𐧦𐹳ݵ ++B; Ⴉ𝆅񔻅\u0619.Ss𐧦𐹳\u0775; [B5 B6 P1 V6]; [B5 B6 P1 V6] # Ⴉؙ𝆅.ss𐧦𐹳ݵ ++B; xn--7fb125cjv87a7xvz.xn--ss-zme7575xp0e; [B5 B6 V6]; [B5 B6 V6] # Ⴉؙ𝆅.ss𐧦𐹳ݵ ++B; xn--7fb940rwt3z7xvz.xn--ss-zme7575xp0e; [B5 B6 V6]; [B5 B6 V6] # ⴉؙ𝆅.ss𐧦𐹳ݵ ++B; xn--7fb940rwt3z7xvz.xn--zca684a699vf2d; [B5 B6 V6]; [B5 B6 V6] # ⴉؙ𝆅.ß𐧦𐹳ݵ ++B; xn--7fb125cjv87a7xvz.xn--zca684a699vf2d; [B5 B6 V6]; [B5 B6 V6] # Ⴉؙ𝆅.ß𐧦𐹳ݵ ++T; \u200D\u0643𐧾↙.񊽡; [B1 C2 P1 V6]; [B3 P1 V6] # ك𐧾↙. 
++N; \u200D\u0643𐧾↙.񊽡; [B1 C2 P1 V6]; [B1 C2 P1 V6] # ك𐧾↙. ++B; xn--fhb011lnp8n.xn--7s4w; [B3 V6]; [B3 V6] # ك𐧾↙. ++B; xn--fhb713k87ag053c.xn--7s4w; [B1 C2 V6]; [B1 C2 V6] # ك𐧾↙. ++T; 梉。\u200C; [C1]; xn--7zv. # 梉. ++N; 梉。\u200C; [C1]; [C1] # 梉. ++B; xn--7zv.; 梉.; xn--7zv. ++B; 梉.; ; xn--7zv. ++B; xn--7zv.xn--0ug; [C1]; [C1] # 梉. ++T; ꡣ-≠.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ ++N; ꡣ-≠.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ ++T; ꡣ-=\u0338.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ ++N; ꡣ-=\u0338.\u200D𞤗𐅢Ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢Ↄ ++T; ꡣ-=\u0338.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ ++N; ꡣ-=\u0338.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ ++T; ꡣ-≠.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ ++N; ꡣ-≠.\u200D𞤹𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ ++T; ꡣ-≠.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ ++N; ꡣ-≠.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ ++T; ꡣ-=\u0338.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B2 B3 B6 P1 V6] # ꡣ-≠.𞤹𐅢ↄ ++N; ꡣ-=\u0338.\u200D𞤗𐅢ↄ; [B1 B6 C2 P1 V6]; [B1 B6 C2 P1 V6] # ꡣ-≠.𞤹𐅢ↄ ++B; xn----ufo9661d.xn--r5gy929fhm4f; [B2 B3 B6 V6]; [B2 B3 B6 V6] ++B; xn----ufo9661d.xn--1ug99cj620c71sh; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ꡣ-≠.𞤹𐅢ↄ ++B; xn----ufo9661d.xn--q5g0929fhm4f; [B2 B3 B6 V6]; [B2 B3 B6 V6] ++B; xn----ufo9661d.xn--1ug79cm620c71sh; [B1 B6 C2 V6]; [B1 B6 C2 V6] # ꡣ-≠.𞤹𐅢Ↄ ++T; ς⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] ++N; ς⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] ++T; ς9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] ++N; ς9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] ++B; Σ9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] ++B; σ9.𝆫⸵。𐱢9,7; [B1 P1 V5 V6]; [B1 P1 V5 V6] ++B; xn--9-zmb.xn--ltj1535k.xn--9,7-r67t; [B1 P1 V5 V6]; [B1 P1 V5 V6] ++B; xn--9-xmb.xn--ltj1535k.xn--9,7-r67t; [B1 P1 V5 V6]; [B1 P1 V5 V6] ++B; Σ⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] ++B; σ⒐𝆫⸵。𐱢🄊𝟳; [B6 P1 V6]; [B6 P1 V6] ++B; xn--4xa809nwtghi25b.xn--7-075iy877c; [B6 V6]; [B6 V6] ++B; 
xn--3xa019nwtghi25b.xn--7-075iy877c; [B6 V6]; [B6 V6] ++T; \u0853.\u200Cß; [B1 C1]; xn--iwb.ss # ࡓ.ß ++N; \u0853.\u200Cß; [B1 C1]; [B1 C1] # ࡓ.ß ++T; \u0853.\u200Cß; [B1 C1]; xn--iwb.ss # ࡓ.ß ++N; \u0853.\u200Cß; [B1 C1]; [B1 C1] # ࡓ.ß ++T; \u0853.\u200CSS; [B1 C1]; xn--iwb.ss # ࡓ.ss ++N; \u0853.\u200CSS; [B1 C1]; [B1 C1] # ࡓ.ss ++T; \u0853.\u200Css; [B1 C1]; xn--iwb.ss # ࡓ.ss ++N; \u0853.\u200Css; [B1 C1]; [B1 C1] # ࡓ.ss ++T; \u0853.\u200CSs; [B1 C1]; xn--iwb.ss # ࡓ.ss ++N; \u0853.\u200CSs; [B1 C1]; [B1 C1] # ࡓ.ss ++B; xn--iwb.ss; \u0853.ss; xn--iwb.ss # ࡓ.ss ++B; \u0853.ss; ; xn--iwb.ss # ࡓ.ss ++B; \u0853.SS; \u0853.ss; xn--iwb.ss # ࡓ.ss ++B; \u0853.Ss; \u0853.ss; xn--iwb.ss # ࡓ.ss ++B; xn--iwb.xn--ss-i1t; [B1 C1]; [B1 C1] # ࡓ.ss ++B; xn--iwb.xn--zca570n; [B1 C1]; [B1 C1] # ࡓ.ß ++T; \u0853.\u200CSS; [B1 C1]; xn--iwb.ss # ࡓ.ss ++N; \u0853.\u200CSS; [B1 C1]; [B1 C1] # ࡓ.ss ++T; \u0853.\u200Css; [B1 C1]; xn--iwb.ss # ࡓ.ss ++N; \u0853.\u200Css; [B1 C1]; [B1 C1] # ࡓ.ss ++T; \u0853.\u200CSs; [B1 C1]; xn--iwb.ss # ࡓ.ss ++N; \u0853.\u200CSs; [B1 C1]; [B1 C1] # ࡓ.ss ++T; 񯶣-.\u200D\u074E\uA94D󠻨; [B1 B6 C2 P1 V3 V6]; [B3 B6 P1 V3 V6] # -.ݎꥍ ++N; 񯶣-.\u200D\u074E\uA94D󠻨; [B1 B6 C2 P1 V3 V6]; [B1 B6 C2 P1 V3 V6] # -.ݎꥍ ++B; xn----s116e.xn--1ob6504fmf40i; [B3 B6 V3 V6]; [B3 B6 V3 V6] # -.ݎꥍ ++B; xn----s116e.xn--1ob387jy90hq459k; [B1 B6 C2 V3 V6]; [B1 B6 C2 V3 V6] # -.ݎꥍ ++B; 䃚蟥-。-񽒘⒈; [P1 V3 V6]; [P1 V3 V6] ++B; 䃚蟥-。-񽒘1.; [P1 V3 V6]; [P1 V3 V6] ++B; xn----n50a258u.xn---1-up07j.; [V3 V6]; [V3 V6] ++B; xn----n50a258u.xn----ecp33805f; [V3 V6]; [V3 V6] ++B; 𐹸䚵-ꡡ。⺇; [B1]; [B1] ++B; xn----bm3an932a1l5d.xn--xvj; [B1]; [B1] ++B; 𑄳。\u1ADC𐹻; [B1 B3 B5 B6 P1 V5 V6]; [B1 B3 B5 B6 P1 V5 V6] # 𑄳.𐹻 ++B; xn--v80d.xn--2rf1154i; [B1 B3 B5 B6 V5 V6]; [B1 B3 B5 B6 V5 V6] # 𑄳.𐹻 ++B; ≮𐹻.⒎𑂵\u06BA\u0602; [B1 P1 V6]; [B1 P1 V6] # ≮𐹻.⒎𑂵ں ++B; <\u0338𐹻.⒎𑂵\u06BA\u0602; [B1 P1 V6]; [B1 P1 V6] # ≮𐹻.⒎𑂵ں ++B; ≮𐹻.7.𑂵\u06BA\u0602; [B1 P1 V5 V6]; [B1 P1 V5 V6] # ≮𐹻.7.𑂵ں ++B; <\u0338𐹻.7.𑂵\u06BA\u0602; [B1 P1 V5 
V6]; [B1 P1 V5 V6] # ≮𐹻.7.𑂵ں ++B; xn--gdhx904g.7.xn--kfb18an307d; [B1 V5 V6]; [B1 V5 V6] # ≮𐹻.7.𑂵ں ++B; xn--gdhx904g.xn--kfb18a325efm3s; [B1 V6]; [B1 V6] # ≮𐹻.⒎𑂵ں ++T; ᢔ≠􋉂.\u200D𐋢; [C2 P1 V6]; [P1 V6] # ᢔ≠.𐋢 ++N; ᢔ≠􋉂.\u200D𐋢; [C2 P1 V6]; [C2 P1 V6] # ᢔ≠.𐋢 ++T; ᢔ=\u0338􋉂.\u200D𐋢; [C2 P1 V6]; [P1 V6] # ᢔ≠.𐋢 ++N; ᢔ=\u0338􋉂.\u200D𐋢; [C2 P1 V6]; [C2 P1 V6] # ᢔ≠.𐋢 ++B; xn--ebf031cf7196a.xn--587c; [V6]; [V6] ++B; xn--ebf031cf7196a.xn--1ug9540g; [C2 V6]; [C2 V6] # ᢔ≠.𐋢 ++B; 𐩁≮񣊛≯.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ ++B; 𐩁<\u0338񣊛>\u0338.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ ++B; 𐩁≮񣊛≯.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ ++B; 𐩁<\u0338񣊛>\u0338.\u066C𞵕⳿; [B1 B2 B3 P1 V6]; [B1 B2 B3 P1 V6] # 𐩁≮≯.٬⳿ ++B; xn--gdhc0519o0y27b.xn--lib468q0d21a; [B1 B2 B3 V6]; [B1 B2 B3 V6] # 𐩁≮≯.٬⳿ ++B; -。⺐; [V3]; [V3] ++B; -。⺐; [V3]; [V3] ++B; -.xn--6vj; [V3]; [V3] ++B; 󠰩𑲬.\u065C; [P1 V5 V6]; [P1 V5 V6] # 𑲬.ٜ ++B; 󠰩𑲬.\u065C; [P1 V5 V6]; [P1 V5 V6] # 𑲬.ٜ ++B; xn--sn3d59267c.xn--4hb; [V5 V6]; [V5 V6] # 𑲬.ٜ ++T; 𐍺.񚇃\u200C; [C1 P1 V5 V6]; [P1 V5 V6] # 𐍺. ++N; 𐍺.񚇃\u200C; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𐍺. ++B; xn--ie8c.xn--2g51a; [V5 V6]; [V5 V6] ++B; xn--ie8c.xn--0ug03366c; [C1 V5 V6]; [C1 V5 V6] # 𐍺. 
++B; \u063D\u06E3.𐨎; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ؽۣ.𐨎 ++B; xn--8gb64a.xn--mr9c; [B1 B3 B6 V5]; [B1 B3 B6 V5] # ؽۣ.𐨎 ++T; 漦Ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++N; 漦Ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++T; 漦ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++N; 漦ⴙς.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++B; 漦ႹΣ.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++B; 漦ⴙσ.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++B; 漦Ⴙσ.񡻀𐴄; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++B; xn--4xa947d717e.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] ++B; xn--4xa772sl47b.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] ++B; xn--3xa972sl47b.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] ++B; xn--3xa157d717e.xn--9d0d3162t; [B5 B6 V6]; [B5 B6 V6] ++B; 𐹫踧\u0CCD򫚇.󜀃⒈𝨤; [B1 P1 V6]; [B1 P1 V6] # 𐹫踧್.⒈𝨤 ++B; 𐹫踧\u0CCD򫚇.󜀃1.𝨤; [B1 B3 B6 P1 V5 V6]; [B1 B3 B6 P1 V5 V6] # 𐹫踧್.1.𝨤 ++B; xn--8tc1437dro0d6q06h.xn--1-p948l.xn--m82h; [B1 B3 B6 V5 V6]; [B1 B3 B6 V5 V6] # 𐹫踧್.1.𝨤 ++B; xn--8tc1437dro0d6q06h.xn--tsh2611ncu71e; [B1 V6]; [B1 V6] # 𐹫踧್.⒈𝨤 ++T; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- ++N; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- ++T; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- ++N; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- ++T; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- ++N; \u200D≮.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- ++T; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [P1 V3 V6] # ≮.- ++N; \u200D<\u0338.󠟪𹫏-; [C2 P1 V3 V6]; [C2 P1 V3 V6] # ≮.- ++B; xn--gdh.xn----cr99a1w710b; [V3 V6]; [V3 V6] ++B; xn--1ug95g.xn----cr99a1w710b; [C2 V3 V6]; [C2 V3 V6] # ≮.- ++T; \u200D\u200D襔。Ⴜ5ꡮ񵝏; [C2 P1 V6]; [P1 V6] # 襔.Ⴜ5ꡮ ++N; \u200D\u200D襔。Ⴜ5ꡮ񵝏; [C2 P1 V6]; [C2 P1 V6] # 襔.Ⴜ5ꡮ ++T; \u200D\u200D襔。ⴜ5ꡮ񵝏; [C2 P1 V6]; [P1 V6] # 襔.ⴜ5ꡮ ++N; \u200D\u200D襔。ⴜ5ꡮ񵝏; [C2 P1 V6]; [C2 P1 V6] # 襔.ⴜ5ꡮ ++B; xn--2u2a.xn--5-uws5848bpf44e; [V6]; [V6] ++B; xn--1uga7691f.xn--5-uws5848bpf44e; [C2 V6]; [C2 V6] # 襔.ⴜ5ꡮ ++B; xn--2u2a.xn--5-r1g7167ipfw8d; [V6]; [V6] ++B; xn--1uga7691f.xn--5-r1g7167ipfw8d; [C2 V6]; [C2 V6] # 襔.Ⴜ5ꡮ ++T; 𐫜𑌼\u200D.婀; [B3 C2]; xn--ix9c26l.xn--q0s # 𐫜𑌼.婀 ++N; 𐫜𑌼\u200D.婀; 
[B3 C2]; [B3 C2] # 𐫜𑌼.婀 ++T; 𐫜𑌼\u200D.婀; [B3 C2]; xn--ix9c26l.xn--q0s # 𐫜𑌼.婀 ++N; 𐫜𑌼\u200D.婀; [B3 C2]; [B3 C2] # 𐫜𑌼.婀 ++B; xn--ix9c26l.xn--q0s; 𐫜𑌼.婀; xn--ix9c26l.xn--q0s ++B; 𐫜𑌼.婀; ; xn--ix9c26l.xn--q0s ++B; xn--1ugx063g1if.xn--q0s; [B3 C2]; [B3 C2] # 𐫜𑌼.婀 ++B; 󠅽︒︒𐹯。⬳\u1A78; [B1 P1 V6]; [B1 P1 V6] # ︒︒𐹯.⬳᩸ ++B; 󠅽。。𐹯。⬳\u1A78; [B1 A4_2]; [B1 A4_2] # ..𐹯.⬳᩸ ++B; ..xn--no0d.xn--7of309e; [B1 A4_2]; [B1 A4_2] # ..𐹯.⬳᩸ ++B; xn--y86ca186j.xn--7of309e; [B1 V6]; [B1 V6] # ︒︒𐹯.⬳᩸ ++T; 𝟖ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ ++N; 𝟖ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ ++T; 8ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ ++N; 8ß.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ ++T; 8ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ ++N; 8ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ ++B; 8SS.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ ++B; 8ss.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-ⴏ ++B; 8Ss.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ ++B; 8ss.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ ++B; 8ss.-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-ⴏ ++B; 8SS.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ ++B; 8Ss.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ ++B; xn--8-qfa.-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ ++B; XN--8-QFA.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ ++B; Xn--8-Qfa.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ ++B; xn--8-qfa.-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-Ⴏ ++T; 𝟖ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ ++N; 𝟖ß.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ß.-ⴏ ++B; 𝟖SS.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ ++B; 𝟖ss.󠄐-\uDBDAⴏ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-ⴏ ++B; 𝟖Ss.󠄐-\uDBDAႯ; [P1 V3 V6]; [P1 V3 V6 A3] # 8ss.-Ⴏ ++T; -\u200D󠋟.\u200C𐹣Ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𐹣Ⴅ ++N; -\u200D󠋟.\u200C𐹣Ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𐹣Ⴅ ++T; -\u200D󠋟.\u200C𐹣ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 P1 V3 V6] # -.𐹣ⴅ ++N; -\u200D󠋟.\u200C𐹣ⴅ; [B1 C1 C2 P1 V3 V6]; [B1 C1 C2 P1 V3 V6] # -.𐹣ⴅ ++B; xn----s721m.xn--wkj1423e; [B1 V3 V6]; 
[B1 V3 V6] ++B; xn----ugnv7071n.xn--0ugz32cgr0p; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # -.𐹣ⴅ ++B; xn----s721m.xn--dnd9201k; [B1 V3 V6]; [B1 V3 V6] ++B; xn----ugnv7071n.xn--dnd999e4j4p; [B1 C1 C2 V3 V6]; [B1 C1 C2 V3 V6] # -.𐹣Ⴅ ++T; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 ++N; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 ++T; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 ++N; \uA9B9\u200D큷𻶡。₂; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 ++T; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 ++N; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 ++T; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [P1 V5 V6] # ꦹ큷.2 ++N; \uA9B9\u200D큷𻶡。2; [C2 P1 V5 V6]; [C2 P1 V5 V6] # ꦹ큷.2 ++B; xn--0m9as84e2e21c.2; [V5 V6]; [V5 V6] # ꦹ큷.2 ++B; xn--1ug1435cfkyaoi04d.2; [C2 V5 V6]; [C2 V5 V6] # ꦹ큷.2 ++B; \uDF4D.🄄𞯘; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 ++B; \uDF4D.3,𞯘; [B1 P1 V6]; [B1 P1 V6 A3] # .3, ++B; \uDF4D.xn--3,-tb22a; [B1 P1 V6]; [B1 P1 V6 A3] # .3, ++B; \uDF4D.XN--3,-TB22A; [B1 P1 V6]; [B1 P1 V6 A3] # .3, ++B; \uDF4D.Xn--3,-Tb22a; [B1 P1 V6]; [B1 P1 V6 A3] # .3, ++B; \uDF4D.xn--3x6hx6f; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 ++B; \uDF4D.XN--3X6HX6F; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 ++B; \uDF4D.Xn--3X6hx6f; [B1 P1 V6]; [B1 P1 V6 A3] # .🄄 ++B; 𝨖𐩙。\u06DD󀡶\uA8C5⒈; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𝨖.ꣅ⒈ ++B; 𝨖𐩙。\u06DD󀡶\uA8C51.; [B1 P1 V5 V6]; [B1 P1 V5 V6] # 𝨖.ꣅ1. ++B; xn--rt9cl956a.xn--1-dxc8545j0693i.; [B1 V5 V6]; [B1 V5 V6] # 𝨖.ꣅ1. 
++B; xn--rt9cl956a.xn--tlb403mxv4g06s9i; [B1 V5 V6]; [B1 V5 V6] # 𝨖.ꣅ⒈ ++T; 򒈣\u05E1\u06B8。Ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # סڸ.Ⴈ ++N; 򒈣\u05E1\u06B8。Ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # סڸ.Ⴈ ++T; 򒈣\u05E1\u06B8。ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 P1 V6] # סڸ.ⴈ ++N; 򒈣\u05E1\u06B8。ⴈ\u200D; [B5 B6 C2 P1 V6]; [B5 B6 C2 P1 V6] # סڸ.ⴈ ++B; xn--meb44b57607c.xn--zkj; [B5 B6 V6]; [B5 B6 V6] # סڸ.ⴈ ++B; xn--meb44b57607c.xn--1ug232c; [B5 B6 C2 V6]; [B5 B6 C2 V6] # סڸ.ⴈ ++B; xn--meb44b57607c.xn--gnd; [B5 B6 V6]; [B5 B6 V6] # סڸ.Ⴈ ++B; xn--meb44b57607c.xn--gnd699e; [B5 B6 C2 V6]; [B5 B6 C2 V6] # סڸ.Ⴈ ++T; 󀚶𝨱\u07E6⒈.𑗝髯\u200C; [B1 B5 C1 P1 V5 V6]; [B1 B5 P1 V5 V6] # 𝨱ߦ⒈.𑗝髯 ++N; 󀚶𝨱\u07E6⒈.𑗝髯\u200C; [B1 B5 C1 P1 V5 V6]; [B1 B5 C1 P1 V5 V6] # 𝨱ߦ⒈.𑗝髯 ++T; 󀚶𝨱\u07E61..𑗝髯\u200C; [B1 B5 C1 P1 V5 V6 A4_2]; [B1 B5 P1 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 ++N; 󀚶𝨱\u07E61..𑗝髯\u200C; [B1 B5 C1 P1 V5 V6 A4_2]; [B1 B5 C1 P1 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 ++B; xn--1-idd62296a1fr6e..xn--uj6at43v; [B1 B5 V5 V6 A4_2]; [B1 B5 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 ++B; xn--1-idd62296a1fr6e..xn--0ugx259bocxd; [B1 B5 C1 V5 V6 A4_2]; [B1 B5 C1 V5 V6 A4_2] # 𝨱ߦ1..𑗝髯 ++B; xn--etb477lq931a1f58e.xn--uj6at43v; [B1 B5 V5 V6]; [B1 B5 V5 V6] # 𝨱ߦ⒈.𑗝髯 ++B; xn--etb477lq931a1f58e.xn--0ugx259bocxd; [B1 B5 C1 V5 V6]; [B1 B5 C1 V5 V6] # 𝨱ߦ⒈.𑗝髯 ++B; 𐫀.\u0689𑌀; 𐫀.\u0689𑌀; xn--pw9c.xn--fjb8658k # 𐫀.ډ𑌀 ++B; 𐫀.\u0689𑌀; ; xn--pw9c.xn--fjb8658k # 𐫀.ډ𑌀 ++B; xn--pw9c.xn--fjb8658k; 𐫀.\u0689𑌀; xn--pw9c.xn--fjb8658k # 𐫀.ډ𑌀 ++B; 𑋪.𐳝; [B1 B3 B6 V5]; [B1 B3 B6 V5] ++B; 𑋪.𐳝; [B1 B3 B6 V5]; [B1 B3 B6 V5] ++B; 𑋪.𐲝; [B1 B3 B6 V5]; [B1 B3 B6 V5] ++B; xn--fm1d.xn--5c0d; [B1 B3 B6 V5]; [B1 B3 B6 V5] ++B; 𑋪.𐲝; [B1 B3 B6 V5]; [B1 B3 B6 V5] ++B; ≠膣。\u0F83; [P1 V5 V6]; [P1 V5 V6] # ≠膣.ྃ ++B; =\u0338膣。\u0F83; [P1 V5 V6]; [P1 V5 V6] # ≠膣.ྃ ++B; xn--1chy468a.xn--2ed; [V5 V6]; [V5 V6] # ≠膣.ྃ ++T; 񰀎-\u077D。ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ß ++N; 񰀎-\u077D。ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ß ++T; 񰀎-\u077D。ß; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ß ++N; 񰀎-\u077D。ß; [B5 B6 P1 
V6]; [B5 B6 P1 V6] # -ݽ.ß ++B; 񰀎-\u077D。SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss ++B; 񰀎-\u077D。ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss ++B; 񰀎-\u077D。Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss ++B; xn----j6c95618k.ss; [B5 B6 V6]; [B5 B6 V6] # -ݽ.ss ++B; xn----j6c95618k.xn--zca; [B5 B6 V6]; [B5 B6 V6] # -ݽ.ß ++B; 񰀎-\u077D。SS; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss ++B; 񰀎-\u077D。ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss ++B; 񰀎-\u077D。Ss; [B5 B6 P1 V6]; [B5 B6 P1 V6] # -ݽ.ss ++T; ς𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++N; ς𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++T; ς𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++N; ς𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++B; Σ𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++B; σ𐹠ᡚ𑄳.靑𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++B; xn--4xa656hp23pxmc.xn--es5a888tvjc2u15h; [B5 B6 V6]; [B5 B6 V6] ++B; xn--3xa856hp23pxmc.xn--es5a888tvjc2u15h; [B5 B6 V6]; [B5 B6 V6] ++B; Σ𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++B; σ𐹠ᡚ𑄳.⾭𐹽𽐖𐫜; [B5 B6 P1 V6]; [B5 B6 P1 V6] ++T; 𐋷。\u200D; [C2]; xn--r97c. # 𐋷. ++N; 𐋷。\u200D; [C2]; [C2] # 𐋷. ++B; xn--r97c.; 𐋷.; xn--r97c.; NV8 ++B; 𐋷.; ; xn--r97c.; NV8 ++B; xn--r97c.xn--1ug; [C2]; [C2] # 𐋷. 
++B; 𑰳𑈯。⥪; [V5]; [V5] ++B; xn--2g1d14o.xn--jti; [V5]; [V5] ++T; 𑆀䁴񤧣.Ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.Ⴕ4͈ ++N; 𑆀䁴񤧣.Ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.Ⴕ4͈ ++T; 𑆀䁴񤧣.Ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.Ⴕ4͈ ++N; 𑆀䁴񤧣.Ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.Ⴕ4͈ ++T; 𑆀䁴񤧣.ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.ⴕ4͈ ++N; 𑆀䁴񤧣.ⴕ4\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.ⴕ4͈ ++B; xn--1mnx647cg3x1b.xn--4-zfb5123a; [V5 V6]; [V5 V6] # 𑆀䁴.ⴕ4͈ ++B; xn--1mnx647cg3x1b.xn--4-zfb502tlsl; [C1 V5 V6]; [C1 V5 V6] # 𑆀䁴.ⴕ4͈ ++B; xn--1mnx647cg3x1b.xn--4-zfb324h; [V5 V6]; [V5 V6] # 𑆀䁴.Ⴕ4͈ ++B; xn--1mnx647cg3x1b.xn--4-zfb324h32o; [C1 V5 V6]; [C1 V5 V6] # 𑆀䁴.Ⴕ4͈ ++T; 𑆀䁴񤧣.ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [P1 V5 V6] # 𑆀䁴.ⴕ4͈ ++N; 𑆀䁴񤧣.ⴕ𝟜\u200C\u0348; [C1 P1 V5 V6]; [C1 P1 V5 V6] # 𑆀䁴.ⴕ4͈ ++T; 憡\uDF1F\u200CႴ.𐋮\u200D≠; [C1 C2 P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ ++N; 憡\uDF1F\u200CႴ.𐋮\u200D≠; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ ++T; 憡\uDF1F\u200CႴ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ ++N; 憡\uDF1F\u200CႴ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ ++T; 憡\uDF1F\u200Cⴔ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [P1 V6 A3] # 憡ⴔ.𐋮≠ ++N; 憡\uDF1F\u200Cⴔ.𐋮\u200D=\u0338; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡ⴔ.𐋮≠ ++T; 憡\uDF1F\u200Cⴔ.𐋮\u200D≠; [C1 C2 P1 V6]; [P1 V6 A3] # 憡ⴔ.𐋮≠ ++N; 憡\uDF1F\u200Cⴔ.𐋮\u200D≠; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡ⴔ.𐋮≠ ++B; 憡\uDF1Fⴔ.xn--1chz659f; [P1 V6]; [P1 V6 A3] # 憡ⴔ.𐋮≠ ++B; 憡\uDF1FႴ.XN--1CHZ659F; [P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ ++B; 憡\uDF1FႴ.xn--1Chz659f; [P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ ++B; 憡\uDF1FႴ.xn--1chz659f; [P1 V6]; [P1 V6 A3] # 憡Ⴔ.𐋮≠ ++T; 憡\uDF1F\u200Cⴔ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡ⴔ.𐋮≠ ++N; 憡\uDF1F\u200Cⴔ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡ⴔ.𐋮≠ ++T; 憡\uDF1F\u200CႴ.XN--1UG73GL146A; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ ++N; 憡\uDF1F\u200CႴ.XN--1UG73GL146A; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ ++T; 憡\uDF1F\u200CႴ.xn--1Ug73gl146a; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ ++N; 
憡\uDF1F\u200CႴ.xn--1Ug73gl146a; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ ++B; 憡\uDF1FႴ.xn--1ug73gl146a; [C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ ++B; 憡\uDF1Fⴔ.xn--1ug73gl146a; [C2 P1 V6]; [C2 P1 V6 A3] # 憡ⴔ.𐋮≠ ++B; 憡\uDF1FႴ.XN--1UG73GL146A; [C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ ++B; 憡\uDF1FႴ.xn--1Ug73gl146a; [C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ ++T; 憡\uDF1F\u200CႴ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ ++N; 憡\uDF1F\u200CႴ.xn--1ug73gl146a; [C1 C2 P1 V6]; [C1 C2 P1 V6 A3] # 憡Ⴔ.𐋮≠ diff --cc vendor/idna-0.1.4/tests/punycode.rs index 000000000,000000000..67988e80c new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/tests/punycode.rs @@@ -1,0 -1,0 +1,65 @@@ ++// Copyright 2013 The rust-url developers. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++use idna::punycode::{decode, encode_str}; ++use rustc_serialize::json::{Json, Object}; ++use test::TestFn; ++ ++fn one_test(decoded: &str, encoded: &str) { ++ match decode(encoded) { ++ None => panic!("Decoding {} failed.", encoded), ++ Some(result) => { ++ let result = result.into_iter().collect::(); ++ assert!(result == decoded, ++ format!("Incorrect decoding of \"{}\":\n \"{}\"\n!= \"{}\"\n", ++ encoded, result, decoded)) ++ } ++ } ++ ++ match encode_str(decoded) { ++ None => panic!("Encoding {} failed.", decoded), ++ Some(result) => { ++ assert!(result == encoded, ++ format!("Incorrect encoding of \"{}\":\n \"{}\"\n!= \"{}\"\n", ++ decoded, result, encoded)) ++ } ++ } ++} ++ ++fn get_string<'a>(map: &'a Object, key: &str) -> &'a str { ++ match map.get(&key.to_string()) { ++ Some(&Json::String(ref s)) => s, ++ None => "", ++ _ => panic!(), ++ } ++} ++ ++pub fn collect_tests(add_test: &mut F) { ++ match Json::from_str(include_str!("punycode_tests.json")) { ++ Ok(Json::Array(tests)) => for (i, test) in tests.into_iter().enumerate() { ++ match test { ++ Json::Object(o) 
=> { ++ let test_name = { ++ let desc = get_string(&o, "description"); ++ if desc.is_empty() { ++ format!("Punycode {}", i + 1) ++ } else { ++ format!("Punycode {}: {}", i + 1, desc) ++ } ++ }; ++ add_test(test_name, TestFn::dyn_test_fn(move || one_test( ++ get_string(&o, "decoded"), ++ get_string(&o, "encoded"), ++ ))) ++ } ++ _ => panic!(), ++ } ++ }, ++ other => panic!("{:?}", other) ++ } ++} diff --cc vendor/idna-0.1.4/tests/punycode_tests.json index 000000000,000000000..86785b124 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/tests/punycode_tests.json @@@ -1,0 -1,0 +1,120 @@@ ++[ ++{ ++ "description": "These tests are copied from https://github.com/bestiejs/punycode.js/blob/master/tests/tests.js , used under the MIT license.", ++ "decoded": "", ++ "encoded": "" ++}, ++{ ++ "description": "a single basic code point", ++ "decoded": "Bach", ++ "encoded": "Bach-" ++}, ++{ ++ "description": "a single non-ASCII character", ++ "decoded": "\u00FC", ++ "encoded": "tda" ++}, ++{ ++ "description": "multiple non-ASCII characters", ++ "decoded": "\u00FC\u00EB\u00E4\u00F6\u2665", ++ "encoded": "4can8av2009b" ++}, ++{ ++ "description": "mix of ASCII and non-ASCII characters", ++ "decoded": "b\u00FCcher", ++ "encoded": "bcher-kva" ++}, ++{ ++ "description": "long string with both ASCII and non-ASCII characters", ++ "decoded": "Willst du die Bl\u00FCthe des fr\u00FChen, die Fr\u00FCchte des sp\u00E4teren Jahres", ++ "encoded": "Willst du die Blthe des frhen, die Frchte des spteren Jahres-x9e96lkal" ++}, ++{ ++ "description": "Arabic (Egyptian)", ++ "decoded": "\u0644\u064A\u0647\u0645\u0627\u0628\u062A\u0643\u0644\u0645\u0648\u0634\u0639\u0631\u0628\u064A\u061F", ++ "encoded": "egbpdaj6bu4bxfgehfvwxn" ++}, ++{ ++ "description": "Chinese (simplified)", ++ "decoded": "\u4ED6\u4EEC\u4E3A\u4EC0\u4E48\u4E0D\u8BF4\u4E2d\u6587", ++ "encoded": "ihqwcrb4cv8a8dqg056pqjye" ++}, ++{ ++ "description": "Chinese (traditional)", ++ "decoded": 
"\u4ED6\u5011\u7232\u4EC0\u9EBD\u4E0D\u8AAA\u4E2D\u6587", ++ "encoded": "ihqwctvzc91f659drss3x8bo0yb" ++}, ++{ ++ "description": "Czech", ++ "decoded": "Pro\u010Dprost\u011Bnemluv\u00ED\u010Desky", ++ "encoded": "Proprostnemluvesky-uyb24dma41a" ++}, ++{ ++ "description": "Hebrew", ++ "decoded": "\u05DC\u05DE\u05D4\u05D4\u05DD\u05E4\u05E9\u05D5\u05D8\u05DC\u05D0\u05DE\u05D3\u05D1\u05E8\u05D9\u05DD\u05E2\u05D1\u05E8\u05D9\u05EA", ++ "encoded": "4dbcagdahymbxekheh6e0a7fei0b" ++}, ++{ ++ "description": "Hindi (Devanagari)", ++ "decoded": "\u092F\u0939\u0932\u094B\u0917\u0939\u093F\u0928\u094D\u0926\u0940\u0915\u094D\u092F\u094B\u0902\u0928\u0939\u0940\u0902\u092C\u094B\u0932\u0938\u0915\u0924\u0947\u0939\u0948\u0902", ++ "encoded": "i1baa7eci9glrd9b2ae1bj0hfcgg6iyaf8o0a1dig0cd" ++}, ++{ ++ "description": "Japanese (kanji and hiragana)", ++ "decoded": "\u306A\u305C\u307F\u3093\u306A\u65E5\u672C\u8A9E\u3092\u8A71\u3057\u3066\u304F\u308C\u306A\u3044\u306E\u304B", ++ "encoded": "n8jok5ay5dzabd5bym9f0cm5685rrjetr6pdxa" ++}, ++{ ++ "description": "Korean (Hangul syllables)", ++ "decoded": "\uC138\uACC4\uC758\uBAA8\uB4E0\uC0AC\uB78C\uB4E4\uC774\uD55C\uAD6D\uC5B4\uB97C\uC774\uD574\uD55C\uB2E4\uBA74\uC5BC\uB9C8\uB098\uC88B\uC744\uAE4C", ++ "encoded": "989aomsvi5e83db1d2a355cv1e0vak1dwrv93d5xbh15a0dt30a5jpsd879ccm6fea98c" ++}, ++{ ++ "description": "Russian (Cyrillic)", ++ "decoded": "\u043F\u043E\u0447\u0435\u043C\u0443\u0436\u0435\u043E\u043D\u0438\u043D\u0435\u0433\u043E\u0432\u043E\u0440\u044F\u0442\u043F\u043E\u0440\u0443\u0441\u0441\u043A\u0438", ++ "encoded": "b1abfaaepdrnnbgefbadotcwatmq2g4l" ++}, ++{ ++ "description": "Spanish", ++ "decoded": "Porqu\u00E9nopuedensimplementehablarenEspa\u00F1ol", ++ "encoded": "PorqunopuedensimplementehablarenEspaol-fmd56a" ++}, ++{ ++ "description": "Vietnamese", ++ "decoded": "T\u1EA1isaoh\u1ECDkh\u00F4ngth\u1EC3ch\u1EC9n\u00F3iti\u1EBFngVi\u1EC7t", ++ "encoded": "TisaohkhngthchnitingVit-kjcr8268qyxafd2f1b9g" ++}, ++{ ++ "decoded": 
"3\u5E74B\u7D44\u91D1\u516B\u5148\u751F", ++ "encoded": "3B-ww4c5e180e575a65lsy2b" ++}, ++{ ++ "decoded": "\u5B89\u5BA4\u5948\u7F8E\u6075-with-SUPER-MONKEYS", ++ "encoded": "-with-SUPER-MONKEYS-pc58ag80a8qai00g7n9n" ++}, ++{ ++ "decoded": "Hello-Another-Way-\u305D\u308C\u305E\u308C\u306E\u5834\u6240", ++ "encoded": "Hello-Another-Way--fc4qua05auwb3674vfr0b" ++}, ++{ ++ "decoded": "\u3072\u3068\u3064\u5C4B\u6839\u306E\u4E0B2", ++ "encoded": "2-u9tlzr9756bt3uc0v" ++}, ++{ ++ "decoded": "Maji\u3067Koi\u3059\u308B5\u79D2\u524D", ++ "encoded": "MajiKoi5-783gue6qz075azm5e" ++}, ++{ ++ "decoded": "\u30D1\u30D5\u30A3\u30FCde\u30EB\u30F3\u30D0", ++ "encoded": "de-jg4avhby1noc0d" ++}, ++{ ++ "decoded": "\u305D\u306E\u30B9\u30D4\u30FC\u30C9\u3067", ++ "encoded": "d9juau41awczczp" ++}, ++{ ++ "description": "ASCII string that breaks the existing rules for host-name labels (It's not a realistic example for IDNA, because IDNA never encodes pure ASCII labels.)", ++ "decoded": "-> $1.00 <-", ++ "encoded": "-> $1.00 <--" ++} ++] diff --cc vendor/idna-0.1.4/tests/tests.rs index 000000000,000000000..8ca218595 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/tests/tests.rs @@@ -1,0 -1,0 +1,21 @@@ ++extern crate idna; ++extern crate rustc_serialize; ++extern crate test; ++ ++mod punycode; ++mod uts46; ++ ++fn main() { ++ let mut tests = Vec::new(); ++ { ++ let mut add_test = |name, run| { ++ tests.push(test::TestDescAndFn { ++ desc: test::TestDesc::new(test::DynTestName(name)), ++ testfn: run, ++ }) ++ }; ++ punycode::collect_tests(&mut add_test); ++ uts46::collect_tests(&mut add_test); ++ } ++ test::test_main(&std::env::args().collect::>(), tests) ++} diff --cc vendor/idna-0.1.4/tests/unit.rs index 000000000,000000000..a7d158d5c new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/tests/unit.rs @@@ -1,0 -1,0 +1,40 @@@ ++extern crate idna; ++extern crate unicode_normalization; ++ ++use idna::uts46; ++use unicode_normalization::char::is_combining_mark; ++ ++ ++fn 
_to_ascii(domain: &str) -> Result { ++ uts46::to_ascii(domain, uts46::Flags { ++ transitional_processing: false, ++ use_std3_ascii_rules: true, ++ verify_dns_length: true, ++ }) ++} ++ ++#[test] ++fn test_v5() { ++ // IdnaTest:784 蔏。𑰺 ++ assert!(is_combining_mark('\u{11C3A}')); ++ assert!(_to_ascii("\u{11C3A}").is_err()); ++ assert!(_to_ascii("\u{850f}.\u{11C3A}").is_err()); ++ assert!(_to_ascii("\u{850f}\u{ff61}\u{11C3A}").is_err()); ++} ++ ++#[test] ++fn test_v8_bidi_rules() { ++ assert_eq!(_to_ascii("abc").unwrap(), "abc"); ++ assert_eq!(_to_ascii("123").unwrap(), "123"); ++ assert_eq!(_to_ascii("אבּג").unwrap(), "xn--kdb3bdf"); ++ assert_eq!(_to_ascii("ابج").unwrap(), "xn--mgbcm"); ++ assert_eq!(_to_ascii("abc.ابج").unwrap(), "abc.xn--mgbcm"); ++ assert_eq!(_to_ascii("אבּג.ابج").unwrap(), "xn--kdb3bdf.xn--mgbcm"); ++ ++ // Bidi domain names cannot start with digits ++ assert!(_to_ascii("0a.\u{05D0}").is_err()); ++ assert!(_to_ascii("0à.\u{05D0}").is_err()); ++ ++ // Bidi chars may be punycode-encoded ++ assert!(_to_ascii("xn--0ca24w").is_err()); ++} diff --cc vendor/idna-0.1.4/tests/uts46.rs index 000000000,000000000..ddc8af989 new file mode 100644 --- /dev/null +++ b/vendor/idna-0.1.4/tests/uts46.rs @@@ -1,0 -1,0 +1,124 @@@ ++// Copyright 2013-2014 The rust-url developers. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. 
++ ++use std::char; ++use idna::uts46; ++use test::TestFn; ++ ++pub fn collect_tests(add_test: &mut F) { ++ // http://www.unicode.org/Public/idna/latest/IdnaTest.txt ++ for (i, line) in include_str!("IdnaTest.txt").lines().enumerate() { ++ if line == "" || line.starts_with("#") { ++ continue ++ } ++ // Remove comments ++ let mut line = match line.find("#") { ++ Some(index) => &line[0..index], ++ None => line ++ }; ++ ++ let mut expected_failure = false; ++ if line.starts_with("XFAIL") { ++ expected_failure = true; ++ line = &line[5..line.len()]; ++ }; ++ ++ let mut pieces = line.split(';').map(|x| x.trim()).collect::>(); ++ ++ let test_type = pieces.remove(0); ++ let original = pieces.remove(0); ++ let source = unescape(original); ++ let to_unicode = pieces.remove(0); ++ let to_ascii = pieces.remove(0); ++ let nv8 = if pieces.len() > 0 { pieces.remove(0) } else { "" }; ++ ++ if expected_failure { ++ continue; ++ } ++ ++ let test_name = format!("UTS #46 line {}", i + 1); ++ add_test(test_name, TestFn::dyn_test_fn(move || { ++ let result = uts46::to_ascii(&source, uts46::Flags { ++ use_std3_ascii_rules: true, ++ transitional_processing: test_type == "T", ++ verify_dns_length: true, ++ }); ++ ++ if to_ascii.starts_with("[") { ++ if to_ascii.starts_with("[C") { ++ // http://unicode.org/reports/tr46/#Deviations ++ // applications that perform IDNA2008 lookup are not required to check ++ // for these contexts ++ return; ++ } ++ if to_ascii == "[V2]" { ++ // Everybody ignores V2 ++ // https://github.com/servo/rust-url/pull/240 ++ // https://github.com/whatwg/url/issues/53#issuecomment-181528158 ++ // http://www.unicode.org/review/pri317/ ++ return; ++ } ++ let res = result.ok(); ++ assert!(res == None, "Expected error. 
result: {} | original: {} | source: {}", ++ res.unwrap(), original, source); ++ return; ++ } ++ ++ let to_ascii = if to_ascii.len() > 0 { ++ to_ascii.to_string() ++ } else { ++ if to_unicode.len() > 0 { ++ to_unicode.to_string() ++ } else { ++ source.clone() ++ } ++ }; ++ ++ if nv8 == "NV8" { ++ // This result isn't valid under IDNA2008. Skip it ++ return; ++ } ++ ++ assert!(result.is_ok(), "Couldn't parse {} | original: {} | error: {:?}", ++ source, original, result.err()); ++ let output = result.ok().unwrap(); ++ assert!(output == to_ascii, "result: {} | expected: {} | original: {} | source: {}", ++ output, to_ascii, original, source); ++ })) ++ } ++} ++ ++fn unescape(input: &str) -> String { ++ let mut output = String::new(); ++ let mut chars = input.chars(); ++ loop { ++ match chars.next() { ++ None => return output, ++ Some(c) => ++ if c == '\\' { ++ match chars.next().unwrap() { ++ '\\' => output.push('\\'), ++ 'u' => { ++ let c1 = chars.next().unwrap().to_digit(16).unwrap(); ++ let c2 = chars.next().unwrap().to_digit(16).unwrap(); ++ let c3 = chars.next().unwrap().to_digit(16).unwrap(); ++ let c4 = chars.next().unwrap().to_digit(16).unwrap(); ++ match char::from_u32((((c1 * 16 + c2) * 16 + c3) * 16 + c4)) ++ { ++ Some(c) => output.push(c), ++ None => { output.push_str(&format!("\\u{:X}{:X}{:X}{:X}",c1,c2,c3,c4)); } ++ }; ++ } ++ _ => panic!("Invalid test data input"), ++ } ++ } else { ++ output.push(c); ++ } ++ } ++ } ++} diff --cc vendor/ignore-0.2.2/.cargo-checksum.json index 000000000,000000000..e5f0dcf15 new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"b3fcaf2365eb14b28ec7603c98c06cc531f19de9eb283d89a3dff8417c8c99f5"} diff --cc vendor/ignore-0.2.2/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/.cargo-ok diff --cc vendor/ignore-0.2.2/COPYING index 000000000,000000000..bb9c20a09 new file mode 100644 --- /dev/null +++ 
b/vendor/ignore-0.2.2/COPYING @@@ -1,0 -1,0 +1,3 @@@ ++This project is dual-licensed under the Unlicense and MIT licenses. ++ ++You may use this code under the terms of either license. diff --cc vendor/ignore-0.2.2/Cargo.toml index 000000000,000000000..a866aebc0 new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/Cargo.toml @@@ -1,0 -1,0 +1,57 @@@ ++# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO ++# ++# When uploading crates to the registry Cargo will automatically ++# "normalize" Cargo.toml files for maximal compatibility ++# with all versions of Cargo and also rewrite `path` dependencies ++# to registry (e.g. crates.io) dependencies ++# ++# If you believe there's an error in this file please file an ++# issue against the rust-lang/cargo repository. If you're ++# editing this file be aware that the upstream Cargo.toml ++# will likely look very different (and much more reasonable) ++ ++[package] ++name = "ignore" ++version = "0.2.2" ++authors = ["Andrew Gallant "] ++description = "A fast library for efficiently matching ignore files such as `.gitignore`\nagainst file paths.\n" ++homepage = "https://github.com/BurntSushi/ripgrep/tree/master/ignore" ++documentation = "https://docs.rs/ignore" ++readme = "README.md" ++keywords = ["glob", "ignore", "gitignore", "pattern", "file"] ++license = "Unlicense/MIT" ++repository = "https://github.com/BurntSushi/ripgrep/tree/master/ignore" ++[profile.release] ++debug = true ++ ++[lib] ++name = "ignore" ++bench = false ++[dependencies.memchr] ++version = "1" ++ ++[dependencies.walkdir] ++version = "1.0.7" ++ ++[dependencies.crossbeam] ++version = "0.2" ++ ++[dependencies.log] ++version = "0.3" ++ ++[dependencies.regex] ++version = "0.2.1" ++ ++[dependencies.thread_local] ++version = "0.3.2" ++ ++[dependencies.globset] ++version = "0.2.0" ++ ++[dependencies.lazy_static] ++version = "0.2" ++[dev-dependencies.tempdir] ++version = "0.3.5" ++ ++[features] ++simd-accel = ["globset/simd-accel"] diff --cc 
vendor/ignore-0.2.2/LICENSE-MIT index 000000000,000000000..3b0a5dc09 new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/LICENSE-MIT @@@ -1,0 -1,0 +1,21 @@@ ++The MIT License (MIT) ++ ++Copyright (c) 2015 Andrew Gallant ++ ++Permission is hereby granted, free of charge, to any person obtaining a copy ++of this software and associated documentation files (the "Software"), to deal ++in the Software without restriction, including without limitation the rights ++to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ++copies of the Software, and to permit persons to whom the Software is ++furnished to do so, subject to the following conditions: ++ ++The above copyright notice and this permission notice shall be included in ++all copies or substantial portions of the Software. ++ ++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ++IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ++FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ++AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ++LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ++OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN ++THE SOFTWARE. diff --cc vendor/ignore-0.2.2/README.md index 000000000,000000000..6759b2d68 new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/README.md @@@ -1,0 -1,0 +1,66 @@@ ++ignore ++====== ++The ignore crate provides a fast recursive directory iterator that respects ++various filters such as globs, file types and `.gitignore` files. This crate ++also provides lower level direct access to gitignore and file type matchers. 
++ ++[![Linux build status](https://api.travis-ci.org/BurntSushi/ripgrep.png)](https://travis-ci.org/BurntSushi/ripgrep) ++[![Windows build status](https://ci.appveyor.com/api/projects/status/github/BurntSushi/ripgrep?svg=true)](https://ci.appveyor.com/project/BurntSushi/ripgrep) ++[![](https://img.shields.io/crates/v/ignore.svg)](https://crates.io/crates/ignore) ++ ++Dual-licensed under MIT or the [UNLICENSE](http://unlicense.org). ++ ++### Documentation ++ ++[https://docs.rs/ignore](https://docs.rs/ignore) ++ ++### Usage ++ ++Add this to your `Cargo.toml`: ++ ++```toml ++[dependencies] ++ignore = "0.2" ++``` ++ ++and this to your crate root: ++ ++```rust ++extern crate ignore; ++``` ++ ++### Example ++ ++This example shows the most basic usage of this crate. This code will ++recursively traverse the current directory while automatically filtering out ++files and directories according to ignore globs found in files like ++`.ignore` and `.gitignore`: ++ ++ ++```rust,no_run ++use ignore::Walk; ++ ++for result in Walk::new("./") { ++ // Each item yielded by the iterator is either a directory entry or an ++ // error, so either print the path or the error. ++ match result { ++ Ok(entry) => println!("{}", entry.path().display()), ++ Err(err) => println!("ERROR: {}", err), ++ } ++} ++``` ++ ++### Example: advanced ++ ++By default, the recursive directory iterator will ignore hidden files and ++directories. This can be disabled by building the iterator with `WalkBuilder`: ++ ++```rust,no_run ++use ignore::WalkBuilder; ++ ++for result in WalkBuilder::new("./").hidden(false).build() { ++ println!("{:?}", result); ++} ++``` ++ ++See the documentation for `WalkBuilder` for many other options. diff --cc vendor/ignore-0.2.2/UNLICENSE index 000000000,000000000..68a49daad new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/UNLICENSE @@@ -1,0 -1,0 +1,24 @@@ ++This is free and unencumbered software released into the public domain. 
++ ++Anyone is free to copy, modify, publish, use, compile, sell, or ++distribute this software, either in source code form or as a compiled ++binary, for any purpose, commercial or non-commercial, and by any ++means. ++ ++In jurisdictions that recognize copyright laws, the author or authors ++of this software dedicate any and all copyright interest in the ++software to the public domain. We make this dedication for the benefit ++of the public at large and to the detriment of our heirs and ++successors. We intend this dedication to be an overt act of ++relinquishment in perpetuity of all present and future rights to this ++software under copyright law. ++ ++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, ++EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF ++MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. ++IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR ++OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ++ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR ++OTHER DEALINGS IN THE SOFTWARE. 
++ ++For more information, please refer to diff --cc vendor/ignore-0.2.2/examples/walk.rs index 000000000,000000000..0ff4ea94e new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/examples/walk.rs @@@ -1,0 -1,0 +1,92 @@@ ++#![allow(dead_code, unused_imports, unused_mut, unused_variables)] ++ ++extern crate crossbeam; ++extern crate ignore; ++extern crate walkdir; ++ ++use std::env; ++use std::io::{self, Write}; ++use std::path::Path; ++use std::sync::Arc; ++use std::sync::atomic::{AtomicUsize, Ordering}; ++use std::thread; ++ ++use crossbeam::sync::MsQueue; ++use ignore::WalkBuilder; ++use walkdir::WalkDir; ++ ++fn main() { ++ let mut path = env::args().nth(1).unwrap(); ++ let mut parallel = false; ++ let mut simple = false; ++ let queue: Arc>> = Arc::new(MsQueue::new()); ++ if path == "parallel" { ++ path = env::args().nth(2).unwrap(); ++ parallel = true; ++ } else if path == "walkdir" { ++ path = env::args().nth(2).unwrap(); ++ simple = true; ++ } ++ ++ let stdout_queue = queue.clone(); ++ let stdout_thread = thread::spawn(move || { ++ let mut stdout = io::BufWriter::new(io::stdout()); ++ while let Some(dent) = stdout_queue.pop() { ++ write_path(&mut stdout, dent.path()); ++ } ++ }); ++ ++ if parallel { ++ let walker = WalkBuilder::new(path).threads(6).build_parallel(); ++ walker.run(|| { ++ let queue = queue.clone(); ++ Box::new(move |result| { ++ use ignore::WalkState::*; ++ ++ queue.push(Some(DirEntry::Y(result.unwrap()))); ++ Continue ++ }) ++ }); ++ } else if simple { ++ let mut stdout = io::BufWriter::new(io::stdout()); ++ let walker = WalkDir::new(path); ++ for result in walker { ++ queue.push(Some(DirEntry::X(result.unwrap()))); ++ } ++ } else { ++ let mut stdout = io::BufWriter::new(io::stdout()); ++ let walker = WalkBuilder::new(path).build(); ++ for result in walker { ++ queue.push(Some(DirEntry::Y(result.unwrap()))); ++ } ++ } ++ queue.push(None); ++ stdout_thread.join().unwrap(); ++} ++ ++enum DirEntry { ++ X(walkdir::DirEntry), ++ 
Y(ignore::DirEntry), ++} ++ ++impl DirEntry { ++ fn path(&self) -> &Path { ++ match *self { ++ DirEntry::X(ref x) => x.path(), ++ DirEntry::Y(ref y) => y.path(), ++ } ++ } ++} ++ ++#[cfg(unix)] ++fn write_path(mut wtr: W, path: &Path) { ++ use std::os::unix::ffi::OsStrExt; ++ wtr.write(path.as_os_str().as_bytes()).unwrap(); ++ wtr.write(b"\n").unwrap(); ++} ++ ++#[cfg(not(unix))] ++fn write_path(mut wtr: W, path: &Path) { ++ wtr.write(path.to_string_lossy().as_bytes()).unwrap(); ++ wtr.write(b"\n").unwrap(); ++} diff --cc vendor/ignore-0.2.2/src/dir.rs index 000000000,000000000..95c718482 new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/src/dir.rs @@@ -1,0 -1,0 +1,800 @@@ ++// This module provides a data structure, `Ignore`, that connects "directory ++// traversal" with "ignore matchers." Specifically, it knows about gitignore ++// semantics and precedence, and is organized based on directory hierarchy. ++// Namely, every matcher logically corresponds to ignore rules from a single ++// directory, and points to the matcher for its corresponding parent directory. ++// In this sense, `Ignore` is a *persistent* data structure. ++// ++// This design was specifically chosen to make it possible to use this data ++// structure in a parallel directory iterator. ++// ++// My initial intention was to expose this module as part of this crate's ++// public API, but I think the data structure's public API is too complicated ++// with non-obvious failure modes. Alas, such things haven't been documented ++// well. ++ ++use std::collections::HashMap; ++use std::ffi::OsString; ++use std::path::{Path, PathBuf}; ++use std::sync::{Arc, RwLock}; ++ ++use gitignore::{self, Gitignore, GitignoreBuilder}; ++use pathutil::{is_hidden, strip_prefix}; ++use overrides::{self, Override}; ++use types::{self, Types}; ++use {Error, Match, PartialErrorBuilder}; ++ ++/// IgnoreMatch represents information about where a match came from when using ++/// the `Ignore` matcher. 
++#[derive(Clone, Debug)] ++pub struct IgnoreMatch<'a>(IgnoreMatchInner<'a>); ++ ++/// IgnoreMatchInner describes precisely where the match information came from. ++/// This is private to allow expansion to more matchers in the future. ++#[derive(Clone, Debug)] ++enum IgnoreMatchInner<'a> { ++ Override(overrides::Glob<'a>), ++ Gitignore(&'a gitignore::Glob), ++ Types(types::Glob<'a>), ++ Hidden, ++} ++ ++impl<'a> IgnoreMatch<'a> { ++ fn overrides(x: overrides::Glob<'a>) -> IgnoreMatch<'a> { ++ IgnoreMatch(IgnoreMatchInner::Override(x)) ++ } ++ ++ fn gitignore(x: &'a gitignore::Glob) -> IgnoreMatch<'a> { ++ IgnoreMatch(IgnoreMatchInner::Gitignore(x)) ++ } ++ ++ fn types(x: types::Glob<'a>) -> IgnoreMatch<'a> { ++ IgnoreMatch(IgnoreMatchInner::Types(x)) ++ } ++ ++ fn hidden() -> IgnoreMatch<'static> { ++ IgnoreMatch(IgnoreMatchInner::Hidden) ++ } ++} ++ ++/// Options for the ignore matcher, shared between the matcher itself and the ++/// builder. ++#[derive(Clone, Copy, Debug)] ++struct IgnoreOptions { ++ /// Whether to ignore hidden file paths or not. ++ hidden: bool, ++ /// Whether to read .ignore files. ++ ignore: bool, ++ /// Whether to read git's global gitignore file. ++ git_global: bool, ++ /// Whether to read .gitignore files. ++ git_ignore: bool, ++ /// Whether to read .git/info/exclude files. ++ git_exclude: bool, ++} ++ ++impl IgnoreOptions { ++ /// Returns true if at least one type of ignore rules should be matched. ++ fn has_any_ignore_options(&self) -> bool { ++ self.ignore || self.git_global || self.git_ignore || self.git_exclude ++ } ++} ++ ++/// Ignore is a matcher useful for recursively walking one or more directories. ++#[derive(Clone, Debug)] ++pub struct Ignore(Arc); ++ ++#[derive(Clone, Debug)] ++struct IgnoreInner { ++ /// A map of all existing directories that have already been ++ /// compiled into matchers. ++ /// ++ /// Note that this is never used during matching, only when adding new ++ /// parent directory matchers. 
This avoids needing to rebuild glob sets for ++ /// parent directories if many paths are being searched. ++ compiled: Arc>>, ++ /// The path to the directory that this matcher was built from. ++ dir: PathBuf, ++ /// An override matcher (default is empty). ++ overrides: Arc, ++ /// A file type matcher. ++ types: Arc, ++ /// The parent directory to match next. ++ /// ++ /// If this is the root directory or there are otherwise no more ++ /// directories to match, then `parent` is `None`. ++ parent: Option, ++ /// Whether this is an absolute parent matcher, as added by add_parent. ++ is_absolute_parent: bool, ++ /// The absolute base path of this matcher. Populated only if parent ++ /// directories are added. ++ absolute_base: Option>, ++ /// Explicit ignore matchers specified by the caller. ++ explicit_ignores: Arc>, ++ /// The matcher for .ignore files. ++ ignore_matcher: Gitignore, ++ /// A global gitignore matcher, usually from $XDG_CONFIG_HOME/git/ignore. ++ git_global_matcher: Arc, ++ /// The matcher for .gitignore files. ++ git_ignore_matcher: Gitignore, ++ /// Special matcher for `.git/info/exclude` files. ++ git_exclude_matcher: Gitignore, ++ /// Whether this directory contains a .git sub-directory. ++ has_git: bool, ++ /// Ignore config. ++ opts: IgnoreOptions, ++} ++ ++impl Ignore { ++ /// Return the directory path of this matcher. ++ #[allow(dead_code)] ++ pub fn path(&self) -> &Path { ++ &self.0.dir ++ } ++ ++ /// Return true if this matcher has no parent. ++ pub fn is_root(&self) -> bool { ++ self.0.parent.is_none() ++ } ++ ++ /// Returns true if this matcher was added via the `add_parents` method. ++ pub fn is_absolute_parent(&self) -> bool { ++ self.0.is_absolute_parent ++ } ++ ++ /// Return this matcher's parent, if one exists. ++ pub fn parent(&self) -> Option { ++ self.0.parent.clone() ++ } ++ ++ /// Create a new `Ignore` matcher with the parent directories of `dir`. 
++ /// ++ /// Note that this can only be called on an `Ignore` matcher with no ++ /// parents (i.e., `is_root` returns `true`). This will panic otherwise. ++ pub fn add_parents>( ++ &self, ++ path: P, ++ ) -> (Ignore, Option) { ++ if !self.is_root() { ++ panic!("Ignore::add_parents called on non-root matcher"); ++ } ++ let absolute_base = match path.as_ref().canonicalize() { ++ Ok(path) => Arc::new(path), ++ Err(_) => { ++ // There's not much we can do here, so just return our ++ // existing matcher. We drop the error to be consistent ++ // with our general pattern of ignoring I/O errors when ++ // processing ignore files. ++ return (self.clone(), None); ++ } ++ }; ++ // List of parents, from child to root. ++ let mut parents = vec![]; ++ let mut path = &**absolute_base; ++ while let Some(parent) = path.parent() { ++ parents.push(parent); ++ path = parent; ++ } ++ let mut errs = PartialErrorBuilder::default(); ++ let mut ig = self.clone(); ++ for parent in parents.into_iter().rev() { ++ let mut compiled = self.0.compiled.write().unwrap(); ++ if let Some(prebuilt) = compiled.get(parent.as_os_str()) { ++ ig = prebuilt.clone(); ++ continue; ++ } ++ let (mut igtmp, err) = ig.add_child_path(parent); ++ errs.maybe_push(err); ++ igtmp.is_absolute_parent = true; ++ igtmp.absolute_base = Some(absolute_base.clone()); ++ ig = Ignore(Arc::new(igtmp)); ++ compiled.insert(parent.as_os_str().to_os_string(), ig.clone()); ++ } ++ (ig, errs.into_error_option()) ++ } ++ ++ /// Create a new `Ignore` matcher for the given child directory. ++ /// ++ /// Since building the matcher may require reading from multiple ++ /// files, it's possible that this method partially succeeds. Therefore, ++ /// a matcher is always returned (which may match nothing) and an error is ++ /// returned if it exists. ++ /// ++ /// Note that all I/O errors are completely ignored. 
++ pub fn add_child>( ++ &self, ++ dir: P, ++ ) -> (Ignore, Option) { ++ let (ig, err) = self.add_child_path(dir.as_ref()); ++ (Ignore(Arc::new(ig)), err) ++ } ++ ++ /// Like add_child, but takes a full path and returns an IgnoreInner. ++ fn add_child_path(&self, dir: &Path) -> (IgnoreInner, Option) { ++ static IG_NAMES: &'static [&'static str] = &[".rgignore", ".ignore"]; ++ ++ let mut errs = PartialErrorBuilder::default(); ++ let ig_matcher = ++ if !self.0.opts.ignore { ++ Gitignore::empty() ++ } else { ++ let (m, err) = create_gitignore(&dir, IG_NAMES); ++ errs.maybe_push(err); ++ m ++ }; ++ let gi_matcher = ++ if !self.0.opts.git_ignore { ++ Gitignore::empty() ++ } else { ++ let (m, err) = create_gitignore(&dir, &[".gitignore"]); ++ errs.maybe_push(err); ++ m ++ }; ++ let gi_exclude_matcher = ++ if !self.0.opts.git_exclude { ++ Gitignore::empty() ++ } else { ++ let (m, err) = create_gitignore(&dir, &[".git/info/exclude"]); ++ errs.maybe_push(err); ++ m ++ }; ++ let ig = IgnoreInner { ++ compiled: self.0.compiled.clone(), ++ dir: dir.to_path_buf(), ++ overrides: self.0.overrides.clone(), ++ types: self.0.types.clone(), ++ parent: Some(self.clone()), ++ is_absolute_parent: false, ++ absolute_base: self.0.absolute_base.clone(), ++ explicit_ignores: self.0.explicit_ignores.clone(), ++ ignore_matcher: ig_matcher, ++ git_global_matcher: self.0.git_global_matcher.clone(), ++ git_ignore_matcher: gi_matcher, ++ git_exclude_matcher: gi_exclude_matcher, ++ has_git: dir.join(".git").is_dir(), ++ opts: self.0.opts, ++ }; ++ (ig, errs.into_error_option()) ++ } ++ ++ /// Returns a match indicating whether the given file path should be ++ /// ignored or not. ++ /// ++ /// The match contains information about its origin. ++ pub fn matched<'a, P: AsRef>( ++ &'a self, ++ path: P, ++ is_dir: bool, ++ ) -> Match> { ++ // We need to be careful with our path. If it has a leading ./, then ++ // strip it because it causes nothing but trouble. 
++ let mut path = path.as_ref(); ++ if let Some(p) = strip_prefix("./", path) { ++ path = p; ++ } ++ // Match against the override patterns. If an override matches ++ // regardless of whether it's whitelist/ignore, then we quit and ++ // return that result immediately. Overrides have the highest ++ // precedence. ++ if !self.0.overrides.is_empty() { ++ let mat = ++ self.0.overrides.matched(path, is_dir) ++ .map(IgnoreMatch::overrides); ++ if !mat.is_none() { ++ return mat; ++ } ++ } ++ let mut whitelisted = Match::None; ++ if self.0.opts.has_any_ignore_options() { ++ let mat = self.matched_ignore(path, is_dir); ++ if mat.is_ignore() { ++ return mat; ++ } else if mat.is_whitelist() { ++ whitelisted = mat; ++ } ++ } ++ if !self.0.types.is_empty() { ++ let mat = ++ self.0.types.matched(path, is_dir).map(IgnoreMatch::types); ++ if mat.is_ignore() { ++ return mat; ++ } else if mat.is_whitelist() { ++ whitelisted = mat; ++ } ++ } ++ if whitelisted.is_none() && self.0.opts.hidden && is_hidden(path) { ++ return Match::Ignore(IgnoreMatch::hidden()); ++ } ++ whitelisted ++ } ++ ++ /// Performs matching only on the ignore files for this directory and ++ /// all parent directories. 
++ fn matched_ignore<'a>( ++ &'a self, ++ path: &Path, ++ is_dir: bool, ++ ) -> Match> { ++ let (mut m_ignore, mut m_gi, mut m_gi_exclude, mut m_explicit) = ++ (Match::None, Match::None, Match::None, Match::None); ++ let mut saw_git = false; ++ for ig in self.parents().take_while(|ig| !ig.0.is_absolute_parent) { ++ if m_ignore.is_none() { ++ m_ignore = ++ ig.0.ignore_matcher.matched(path, is_dir) ++ .map(IgnoreMatch::gitignore); ++ } ++ if !saw_git && m_gi.is_none() { ++ m_gi = ++ ig.0.git_ignore_matcher.matched(path, is_dir) ++ .map(IgnoreMatch::gitignore); ++ } ++ if !saw_git && m_gi_exclude.is_none() { ++ m_gi_exclude = ++ ig.0.git_exclude_matcher.matched(path, is_dir) ++ .map(IgnoreMatch::gitignore); ++ } ++ saw_git = saw_git || ig.0.has_git; ++ } ++ if let Some(abs_parent_path) = self.absolute_base() { ++ let path = abs_parent_path.join(path); ++ for ig in self.parents().skip_while(|ig|!ig.0.is_absolute_parent) { ++ if m_ignore.is_none() { ++ m_ignore = ++ ig.0.ignore_matcher.matched(&path, is_dir) ++ .map(IgnoreMatch::gitignore); ++ } ++ if !saw_git && m_gi.is_none() { ++ m_gi = ++ ig.0.git_ignore_matcher.matched(&path, is_dir) ++ .map(IgnoreMatch::gitignore); ++ } ++ if !saw_git && m_gi_exclude.is_none() { ++ m_gi_exclude = ++ ig.0.git_exclude_matcher.matched(&path, is_dir) ++ .map(IgnoreMatch::gitignore); ++ } ++ saw_git = saw_git || ig.0.has_git; ++ } ++ } ++ for gi in self.0.explicit_ignores.iter().rev() { ++ if !m_explicit.is_none() { ++ break; ++ } ++ m_explicit = gi.matched(&path, is_dir).map(IgnoreMatch::gitignore); ++ } ++ let m_global = self.0.git_global_matcher.matched(&path, is_dir) ++ .map(IgnoreMatch::gitignore); ++ ++ m_ignore.or(m_gi).or(m_gi_exclude).or(m_global).or(m_explicit) ++ } ++ ++ /// Returns an iterator over parent ignore matchers, including this one. ++ pub fn parents(&self) -> Parents { ++ Parents(Some(self)) ++ } ++ ++ /// Returns the first absolute path of the first absolute parent, if ++ /// one exists. 
++ fn absolute_base(&self) -> Option<&Path> { ++ self.0.absolute_base.as_ref().map(|p| &***p) ++ } ++} ++ ++/// An iterator over all parents of an ignore matcher, including itself. ++/// ++/// The lifetime `'a` refers to the lifetime of the initial `Ignore` matcher. ++pub struct Parents<'a>(Option<&'a Ignore>); ++ ++impl<'a> Iterator for Parents<'a> { ++ type Item = &'a Ignore; ++ ++ fn next(&mut self) -> Option<&'a Ignore> { ++ match self.0.take() { ++ None => None, ++ Some(ig) => { ++ self.0 = ig.0.parent.as_ref(); ++ Some(ig) ++ } ++ } ++ } ++} ++ ++/// A builder for creating an Ignore matcher. ++#[derive(Clone, Debug)] ++pub struct IgnoreBuilder { ++ /// The root directory path for this ignore matcher. ++ dir: PathBuf, ++ /// An override matcher (default is empty). ++ overrides: Arc, ++ /// A type matcher (default is empty). ++ types: Arc, ++ /// Explicit ignore matchers. ++ explicit_ignores: Vec, ++ /// Ignore config. ++ opts: IgnoreOptions, ++} ++ ++impl IgnoreBuilder { ++ /// Create a new builder for an `Ignore` matcher. ++ /// ++ /// All relative file paths are resolved with respect to the current ++ /// working directory. ++ pub fn new() -> IgnoreBuilder { ++ IgnoreBuilder { ++ dir: Path::new("").to_path_buf(), ++ overrides: Arc::new(Override::empty()), ++ types: Arc::new(Types::empty()), ++ explicit_ignores: vec![], ++ opts: IgnoreOptions { ++ hidden: true, ++ ignore: true, ++ git_global: true, ++ git_ignore: true, ++ git_exclude: true, ++ }, ++ } ++ } ++ ++ /// Builds a new `Ignore` matcher. ++ /// ++ /// The matcher returned won't match anything until ignore rules from ++ /// directories are added to it. 
++ pub fn build(&self) -> Ignore { ++ let git_global_matcher = ++ if !self.opts.git_global { ++ Gitignore::empty() ++ } else { ++ let (gi, err) = Gitignore::global(); ++ if let Some(err) = err { ++ debug!("{}", err); ++ } ++ gi ++ }; ++ Ignore(Arc::new(IgnoreInner { ++ compiled: Arc::new(RwLock::new(HashMap::new())), ++ dir: self.dir.clone(), ++ overrides: self.overrides.clone(), ++ types: self.types.clone(), ++ parent: None, ++ is_absolute_parent: true, ++ absolute_base: None, ++ explicit_ignores: Arc::new(self.explicit_ignores.clone()), ++ ignore_matcher: Gitignore::empty(), ++ git_global_matcher: Arc::new(git_global_matcher), ++ git_ignore_matcher: Gitignore::empty(), ++ git_exclude_matcher: Gitignore::empty(), ++ has_git: false, ++ opts: self.opts, ++ })) ++ } ++ ++ /// Add an override matcher. ++ /// ++ /// By default, no override matcher is used. ++ /// ++ /// This overrides any previous setting. ++ pub fn overrides(&mut self, overrides: Override) -> &mut IgnoreBuilder { ++ self.overrides = Arc::new(overrides); ++ self ++ } ++ ++ /// Add a file type matcher. ++ /// ++ /// By default, no file type matcher is used. ++ /// ++ /// This overrides any previous setting. ++ pub fn types(&mut self, types: Types) -> &mut IgnoreBuilder { ++ self.types = Arc::new(types); ++ self ++ } ++ ++ /// Adds a new global ignore matcher from the ignore file path given. ++ pub fn add_ignore(&mut self, ig: Gitignore) -> &mut IgnoreBuilder { ++ self.explicit_ignores.push(ig); ++ self ++ } ++ ++ /// Enables ignoring hidden files. ++ /// ++ /// This is enabled by default. ++ pub fn hidden(&mut self, yes: bool) -> &mut IgnoreBuilder { ++ self.opts.hidden = yes; ++ self ++ } ++ ++ /// Enables reading `.ignore` files. ++ /// ++ /// `.ignore` files have the same semantics as `gitignore` files and are ++ /// supported by search tools such as ripgrep and The Silver Searcher. ++ /// ++ /// This is enabled by default. 
++ pub fn ignore(&mut self, yes: bool) -> &mut IgnoreBuilder { ++ self.opts.ignore = yes; ++ self ++ } ++ ++ /// Add a global gitignore matcher. ++ /// ++ /// Its precedence is lower than both normal `.gitignore` files and ++ /// `.git/info/exclude` files. ++ /// ++ /// This overwrites any previous global gitignore setting. ++ /// ++ /// This is enabled by default. ++ pub fn git_global(&mut self, yes: bool) -> &mut IgnoreBuilder { ++ self.opts.git_global = yes; ++ self ++ } ++ ++ /// Enables reading `.gitignore` files. ++ /// ++ /// `.gitignore` files have match semantics as described in the `gitignore` ++ /// man page. ++ /// ++ /// This is enabled by default. ++ pub fn git_ignore(&mut self, yes: bool) -> &mut IgnoreBuilder { ++ self.opts.git_ignore = yes; ++ self ++ } ++ ++ /// Enables reading `.git/info/exclude` files. ++ /// ++ /// `.git/info/exclude` files have match semantics as described in the ++ /// `gitignore` man page. ++ /// ++ /// This is enabled by default. ++ pub fn git_exclude(&mut self, yes: bool) -> &mut IgnoreBuilder { ++ self.opts.git_exclude = yes; ++ self ++ } ++} ++ ++/// Creates a new gitignore matcher for the directory given. ++/// ++/// Ignore globs are extracted from each of the file names in `dir` in the ++/// order given (earlier names have lower precedence than later names). ++/// ++/// I/O errors are ignored. 
++pub fn create_gitignore( ++ dir: &Path, ++ names: &[&str], ++) -> (Gitignore, Option) { ++ let mut builder = GitignoreBuilder::new(dir); ++ let mut errs = PartialErrorBuilder::default(); ++ for name in names { ++ let gipath = dir.join(name); ++ errs.maybe_push_ignore_io(builder.add(gipath)); ++ } ++ let gi = match builder.build() { ++ Ok(gi) => gi, ++ Err(err) => { ++ errs.push(err); ++ GitignoreBuilder::new(dir).build().unwrap() ++ } ++ }; ++ (gi, errs.into_error_option()) ++} ++ ++#[cfg(test)] ++mod tests { ++ use std::fs::{self, File}; ++ use std::io::Write; ++ use std::path::Path; ++ ++ use tempdir::TempDir; ++ ++ use dir::IgnoreBuilder; ++ use gitignore::Gitignore; ++ use Error; ++ ++ fn wfile>(path: P, contents: &str) { ++ let mut file = File::create(path).unwrap(); ++ file.write_all(contents.as_bytes()).unwrap(); ++ } ++ ++ fn mkdirp>(path: P) { ++ fs::create_dir_all(path).unwrap(); ++ } ++ ++ fn partial(err: Error) -> Vec { ++ match err { ++ Error::Partial(errs) => errs, ++ _ => panic!("expected partial error but got {:?}", err), ++ } ++ } ++ ++ #[test] ++ fn explicit_ignore() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ wfile(td.path().join("not-an-ignore"), "foo\n!bar"); ++ ++ let (gi, err) = Gitignore::new(td.path().join("not-an-ignore")); ++ assert!(err.is_none()); ++ let (ig, err) = IgnoreBuilder::new() ++ .add_ignore(gi).build().add_child(td.path()); ++ assert!(err.is_none()); ++ assert!(ig.matched("foo", false).is_ignore()); ++ assert!(ig.matched("bar", false).is_whitelist()); ++ assert!(ig.matched("baz", false).is_none()); ++ } ++ ++ #[test] ++ fn git_exclude() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ mkdirp(td.path().join(".git/info")); ++ wfile(td.path().join(".git/info/exclude"), "foo\n!bar"); ++ ++ let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); ++ assert!(err.is_none()); ++ assert!(ig.matched("foo", false).is_ignore()); ++ assert!(ig.matched("bar", false).is_whitelist()); ++ 
assert!(ig.matched("baz", false).is_none()); ++ } ++ ++ #[test] ++ fn gitignore() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ wfile(td.path().join(".gitignore"), "foo\n!bar"); ++ ++ let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); ++ assert!(err.is_none()); ++ assert!(ig.matched("foo", false).is_ignore()); ++ assert!(ig.matched("bar", false).is_whitelist()); ++ assert!(ig.matched("baz", false).is_none()); ++ } ++ ++ #[test] ++ fn ignore() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ wfile(td.path().join(".ignore"), "foo\n!bar"); ++ ++ let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); ++ assert!(err.is_none()); ++ assert!(ig.matched("foo", false).is_ignore()); ++ assert!(ig.matched("bar", false).is_whitelist()); ++ assert!(ig.matched("baz", false).is_none()); ++ } ++ ++ // Tests that an .ignore will override a .gitignore. ++ #[test] ++ fn ignore_over_gitignore() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ wfile(td.path().join(".gitignore"), "foo"); ++ wfile(td.path().join(".ignore"), "!foo"); ++ ++ let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); ++ assert!(err.is_none()); ++ assert!(ig.matched("foo", false).is_whitelist()); ++ } ++ ++ // Tests that exclude has lower precedence than both .ignore and .gitignore.
++ #[test] ++ fn exclude_lowest() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ wfile(td.path().join(".gitignore"), "!foo"); ++ wfile(td.path().join(".ignore"), "!bar"); ++ mkdirp(td.path().join(".git/info")); ++ wfile(td.path().join(".git/info/exclude"), "foo\nbar\nbaz"); ++ ++ let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); ++ assert!(err.is_none()); ++ assert!(ig.matched("baz", false).is_ignore()); ++ assert!(ig.matched("foo", false).is_whitelist()); ++ assert!(ig.matched("bar", false).is_whitelist()); ++ } ++ ++ #[test] ++ fn errored() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ wfile(td.path().join(".gitignore"), "f**oo"); ++ ++ let (_, err) = IgnoreBuilder::new().build().add_child(td.path()); ++ assert!(err.is_some()); ++ } ++ ++ #[test] ++ fn errored_both() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ wfile(td.path().join(".gitignore"), "f**oo"); ++ wfile(td.path().join(".ignore"), "fo**o"); ++ ++ let (_, err) = IgnoreBuilder::new().build().add_child(td.path()); ++ assert_eq!(2, partial(err.expect("an error")).len()); ++ } ++ ++ #[test] ++ fn errored_partial() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ wfile(td.path().join(".gitignore"), "f**oo\nbar"); ++ ++ let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); ++ assert!(err.is_some()); ++ assert!(ig.matched("bar", false).is_ignore()); ++ } ++ ++ #[test] ++ fn errored_partial_and_ignore() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ wfile(td.path().join(".gitignore"), "f**oo\nbar"); ++ wfile(td.path().join(".ignore"), "!bar"); ++ ++ let (ig, err) = IgnoreBuilder::new().build().add_child(td.path()); ++ assert!(err.is_some()); ++ assert!(ig.matched("bar", false).is_whitelist()); ++ } ++ ++ #[test] ++ fn not_present_empty() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ ++ let (_, err) = IgnoreBuilder::new().build().add_child(td.path()); ++ assert!(err.is_none()); ++ } ++ ++ #[test] ++ fn stops_at_git_dir() { ++ 
// This tests that .gitignore files beyond a .git barrier aren't ++ // matched, but .ignore files are. ++ let td = TempDir::new("ignore-test-").unwrap(); ++ mkdirp(td.path().join(".git")); ++ mkdirp(td.path().join("foo/.git")); ++ wfile(td.path().join(".gitignore"), "foo"); ++ wfile(td.path().join(".ignore"), "bar"); ++ ++ let ig0 = IgnoreBuilder::new().build(); ++ let (ig1, err) = ig0.add_child(td.path()); ++ assert!(err.is_none()); ++ let (ig2, err) = ig1.add_child(ig1.path().join("foo")); ++ assert!(err.is_none()); ++ ++ assert!(ig1.matched("foo", false).is_ignore()); ++ assert!(ig2.matched("foo", false).is_none()); ++ ++ assert!(ig1.matched("bar", false).is_ignore()); ++ assert!(ig2.matched("bar", false).is_ignore()); ++ } ++ ++ #[test] ++ fn absolute_parent() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ mkdirp(td.path().join(".git")); ++ mkdirp(td.path().join("foo")); ++ wfile(td.path().join(".gitignore"), "bar"); ++ ++ // First, check that the parent gitignore file isn't detected if the ++ // parent isn't added. This establishes a baseline. ++ let ig0 = IgnoreBuilder::new().build(); ++ let (ig1, err) = ig0.add_child(td.path().join("foo")); ++ assert!(err.is_none()); ++ assert!(ig1.matched("bar", false).is_none()); ++ ++ // Second, check that adding a parent directory actually works. 
++ let ig0 = IgnoreBuilder::new().build(); ++ let (ig1, err) = ig0.add_parents(td.path().join("foo")); ++ assert!(err.is_none()); ++ let (ig2, err) = ig1.add_child(td.path().join("foo")); ++ assert!(err.is_none()); ++ assert!(ig2.matched("bar", false).is_ignore()); ++ } ++ ++ #[test] ++ fn absolute_parent_anchored() { ++ let td = TempDir::new("ignore-test-").unwrap(); ++ mkdirp(td.path().join(".git")); ++ mkdirp(td.path().join("src/llvm")); ++ wfile(td.path().join(".gitignore"), "/llvm/\nfoo"); ++ ++ let ig0 = IgnoreBuilder::new().build(); ++ let (ig1, err) = ig0.add_parents(td.path().join("src")); ++ assert!(err.is_none()); ++ let (ig2, err) = ig1.add_child("src"); ++ assert!(err.is_none()); ++ ++ assert!(ig1.matched("llvm", true).is_none()); ++ assert!(ig2.matched("llvm", true).is_none()); ++ assert!(ig2.matched("src/llvm", true).is_none()); ++ assert!(ig2.matched("foo", false).is_ignore()); ++ assert!(ig2.matched("src/foo", false).is_ignore()); ++ } ++} diff --cc vendor/ignore-0.2.2/src/gitignore.rs index 000000000,000000000..85109559c new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/src/gitignore.rs @@@ -1,0 -1,0 +1,692 @@@ ++/*! ++The gitignore module provides a way to match globs from a gitignore file ++against file paths. ++ ++Note that this module implements the specification as described in the ++`gitignore` man page from scratch. That is, this module does *not* shell out to ++the `git` command line tool. ++*/ ++ ++use std::cell::RefCell; ++use std::env; ++use std::fs::File; ++use std::io::{self, BufRead, Read}; ++use std::path::{Path, PathBuf}; ++use std::str; ++use std::sync::Arc; ++ ++use globset::{Candidate, GlobBuilder, GlobSet, GlobSetBuilder}; ++use regex::bytes::Regex; ++use thread_local::ThreadLocal; ++ ++use pathutil::{is_file_name, strip_prefix}; ++use {Error, Match, PartialErrorBuilder}; ++ ++/// Glob represents a single glob in a gitignore file. 
++/// ++/// This is used to report information about the highest precedent glob that ++/// matched in one or more gitignore files. ++#[derive(Clone, Debug)] ++pub struct Glob { ++ /// The file path that this glob was extracted from. ++ from: Option, ++ /// The original glob string. ++ original: String, ++ /// The actual glob string used to convert to a regex. ++ actual: String, ++ /// Whether this is a whitelisted glob or not. ++ is_whitelist: bool, ++ /// Whether this glob should only match directories or not. ++ is_only_dir: bool, ++} ++ ++impl Glob { ++ /// Returns the file path that defined this glob. ++ pub fn from(&self) -> Option<&Path> { ++ self.from.as_ref().map(|p| &**p) ++ } ++ ++ /// The original glob as it was defined in a gitignore file. ++ pub fn original(&self) -> &str { ++ &self.original ++ } ++ ++ /// The actual glob that was compiled to respect gitignore ++ /// semantics. ++ pub fn actual(&self) -> &str { ++ &self.actual ++ } ++ ++ /// Whether this was a whitelisted glob or not. ++ pub fn is_whitelist(&self) -> bool { ++ self.is_whitelist ++ } ++ ++ /// Whether this glob must match a directory or not. ++ pub fn is_only_dir(&self) -> bool { ++ self.is_only_dir ++ } ++} ++ ++/// Gitignore is a matcher for the globs in one or more gitignore files ++/// in the same directory. ++#[derive(Clone, Debug)] ++pub struct Gitignore { ++ set: GlobSet, ++ root: PathBuf, ++ globs: Vec, ++ num_ignores: u64, ++ num_whitelists: u64, ++ matches: Arc>>>, ++} ++ ++impl Gitignore { ++ /// Creates a new gitignore matcher from the gitignore file path given. ++ /// ++ /// If it's desirable to include multiple gitignore files in a single ++ /// matcher, or read gitignore globs from a different source, then ++ /// use `GitignoreBuilder`. ++ /// ++ /// This always returns a valid matcher, even if it's empty. In particular, ++ /// a Gitignore file can be partially valid, e.g., when one glob is invalid ++ /// but the rest aren't. 
++ /// ++ /// Note that I/O errors are ignored. For more granular control over ++ /// errors, use `GitignoreBuilder`. ++ pub fn new>( ++ gitignore_path: P, ++ ) -> (Gitignore, Option) { ++ let path = gitignore_path.as_ref(); ++ let parent = path.parent().unwrap_or(Path::new("/")); ++ let mut builder = GitignoreBuilder::new(parent); ++ let mut errs = PartialErrorBuilder::default(); ++ errs.maybe_push_ignore_io(builder.add(path)); ++ match builder.build() { ++ Ok(gi) => (gi, errs.into_error_option()), ++ Err(err) => { ++ errs.push(err); ++ (Gitignore::empty(), errs.into_error_option()) ++ } ++ } ++ } ++ ++ /// Creates a new gitignore matcher from the global ignore file, if one ++ /// exists. ++ /// ++ /// The global config file path is specified by git's `core.excludesFile` ++ /// config option. ++ /// ++ /// Git's config file location is `$HOME/.gitconfig`. If `$HOME/.gitconfig` ++ /// does not exist or does not specify `core.excludesFile`, then ++ /// `$XDG_CONFIG_HOME/git/ignore` is read. If `$XDG_CONFIG_HOME` is not ++ /// set or is empty, then `$HOME/.config/git/ignore` is used instead. ++ pub fn global() -> (Gitignore, Option) { ++ match gitconfig_excludes_path() { ++ None => (Gitignore::empty(), None), ++ Some(path) => { ++ if !path.is_file() { ++ (Gitignore::empty(), None) ++ } else { ++ Gitignore::new(path) ++ } ++ } ++ } ++ } ++ ++ /// Creates a new empty gitignore matcher that never matches anything. ++ /// ++ /// Its path is empty. ++ pub fn empty() -> Gitignore { ++ GitignoreBuilder::new("").build().unwrap() ++ } ++ ++ /// Returns the directory containing this gitignore matcher. ++ /// ++ /// All matches are done relative to this path. ++ pub fn path(&self) -> &Path { ++ &*self.root ++ } ++ ++ /// Returns true if and only if this gitignore has zero globs, and ++ /// therefore never matches any file path. 
++ pub fn is_empty(&self) -> bool { ++ self.set.is_empty() ++ } ++ ++ /// Returns the total number of globs, which should be equivalent to ++ /// `num_ignores + num_whitelists`. ++ pub fn len(&self) -> usize { ++ self.set.len() ++ } ++ ++ /// Returns the total number of ignore globs. ++ pub fn num_ignores(&self) -> u64 { ++ self.num_ignores ++ } ++ ++ /// Returns the total number of whitelisted globs. ++ pub fn num_whitelists(&self) -> u64 { ++ self.num_whitelists ++ } ++ ++ /// Returns whether the given path (file or directory) matched a pattern in ++ /// this gitignore matcher. ++ /// ++ /// `is_dir` should be true if the path refers to a directory and false ++ /// otherwise. ++ /// ++ /// The given path is matched relative to the path given when building ++ /// the matcher. Specifically, before matching `path`, its prefix (as ++ /// determined by a common suffix of the directory containing this ++ /// gitignore) is stripped. If there is no common suffix/prefix overlap, ++ /// then `path` is assumed to be relative to this matcher. ++ pub fn matched>( ++ &self, ++ path: P, ++ is_dir: bool, ++ ) -> Match<&Glob> { ++ if self.is_empty() { ++ return Match::None; ++ } ++ self.matched_stripped(self.strip(path.as_ref()), is_dir) ++ } ++ ++ /// Returns whether the given path (file or directory, and expected to be ++ /// under the root) or any of its parent directories (up to the root) ++ /// matched a pattern in this gitignore matcher. ++ /// ++ /// NOTE: This method is more expensive than walking the directory hierarchy ++ /// top-to-bottom and matching the entries. But, is easier to use in cases ++ /// when a list of paths are available without a hierarchy. ++ /// ++ /// `is_dir` should be true if the path refers to a directory and false ++ /// otherwise. ++ /// ++ /// The given path is matched relative to the path given when building ++ /// the matcher. 
Specifically, before matching `path`, its prefix (as ++ /// determined by a common suffix of the directory containing this ++ /// gitignore) is stripped. If there is no common suffix/prefix overlap, ++ /// then `path` is assumed to be relative to this matcher. ++ pub fn matched_path_or_any_parents>( ++ &self, ++ path: P, ++ is_dir: bool, ++ ) -> Match<&Glob> { ++ if self.is_empty() { ++ return Match::None; ++ } ++ let mut path = self.strip(path.as_ref()); ++ debug_assert!( ++ !path.has_root(), ++ "path is expect to be under the root" ++ ); ++ match self.matched_stripped(path, is_dir) { ++ Match::None => (), // walk up ++ a_match => return a_match, ++ } ++ while let Some(parent) = path.parent() { ++ match self.matched_stripped(parent, /* is_dir */ true) { ++ Match::None => path = parent, // walk up ++ a_match => return a_match, ++ } ++ } ++ Match::None ++ } ++ ++ /// Like matched, but takes a path that has already been stripped. ++ fn matched_stripped>( ++ &self, ++ path: P, ++ is_dir: bool, ++ ) -> Match<&Glob> { ++ if self.is_empty() { ++ return Match::None; ++ } ++ let path = path.as_ref(); ++ let _matches = self.matches.get_default(); ++ let mut matches = _matches.borrow_mut(); ++ let candidate = Candidate::new(path); ++ self.set.matches_candidate_into(&candidate, &mut *matches); ++ for &i in matches.iter().rev() { ++ let glob = &self.globs[i]; ++ if !glob.is_only_dir() || is_dir { ++ return if glob.is_whitelist() { ++ Match::Whitelist(glob) ++ } else { ++ Match::Ignore(glob) ++ }; ++ } ++ } ++ Match::None ++ } ++ ++ /// Strips the given path such that it's suitable for matching with this ++ /// gitignore matcher. ++ fn strip<'a, P: 'a + AsRef + ?Sized>( ++ &'a self, ++ path: &'a P, ++ ) -> &'a Path { ++ let mut path = path.as_ref(); ++ // A leading ./ is completely superfluous. We also strip it from ++ // our gitignore root path, so we need to strip it from our candidate ++ // path too. 
++ if let Some(p) = strip_prefix("./", path) { ++ path = p; ++ } ++ // Strip any common prefix between the candidate path and the root ++ // of the gitignore, to make sure we get relative matching right. ++ // BUT, a file name might not have any directory components to it, ++ // in which case, we don't want to accidentally strip any part of the ++ // file name. ++ if !is_file_name(path) { ++ if let Some(p) = strip_prefix(&self.root, path) { ++ path = p; ++ // If we're left with a leading slash, get rid of it. ++ if let Some(p) = strip_prefix("/", path) { ++ path = p; ++ } ++ } ++ } ++ path ++ } ++} ++ ++/// Builds a matcher for a single set of globs from a .gitignore file. ++pub struct GitignoreBuilder { ++ builder: GlobSetBuilder, ++ root: PathBuf, ++ globs: Vec, ++ case_insensitive: bool, ++} ++ ++impl GitignoreBuilder { ++ /// Create a new builder for a gitignore file. ++ /// ++ /// The path given should be the path at which the globs for this gitignore ++ /// file should be matched. Note that paths are always matched relative ++ /// to the root path given here. Generally, the root path should correspond ++ /// to the *directory* containing a `.gitignore` file. ++ pub fn new>(root: P) -> GitignoreBuilder { ++ let root = root.as_ref(); ++ GitignoreBuilder { ++ builder: GlobSetBuilder::new(), ++ root: strip_prefix("./", root).unwrap_or(root).to_path_buf(), ++ globs: vec![], ++ case_insensitive: false, ++ } ++ } ++ ++ /// Builds a new matcher from the globs added so far. ++ /// ++ /// Once a matcher is built, no new globs can be added to it. 
++ pub fn build(&self) -> Result { ++ let nignore = self.globs.iter().filter(|g| !g.is_whitelist()).count(); ++ let nwhite = self.globs.iter().filter(|g| g.is_whitelist()).count(); ++ let set = try!( ++ self.builder.build().map_err(|err| { ++ Error::Glob { ++ glob: None, ++ err: err.to_string(), ++ } ++ })); ++ Ok(Gitignore { ++ set: set, ++ root: self.root.clone(), ++ globs: self.globs.clone(), ++ num_ignores: nignore as u64, ++ num_whitelists: nwhite as u64, ++ matches: Arc::new(ThreadLocal::default()), ++ }) ++ } ++ ++ /// Add each glob from the file path given. ++ /// ++ /// The file given should be formatted as a `gitignore` file. ++ /// ++ /// Note that partial errors can be returned. For example, if there was ++ /// a problem adding one glob, an error for that will be returned, but ++ /// all other valid globs will still be added. ++ pub fn add>(&mut self, path: P) -> Option { ++ let path = path.as_ref(); ++ let file = match File::open(path) { ++ Err(err) => return Some(Error::Io(err).with_path(path)), ++ Ok(file) => file, ++ }; ++ let rdr = io::BufReader::new(file); ++ let mut errs = PartialErrorBuilder::default(); ++ for (i, line) in rdr.lines().enumerate() { ++ let lineno = (i + 1) as u64; ++ let line = match line { ++ Ok(line) => line, ++ Err(err) => { ++ errs.push(Error::Io(err).tagged(path, lineno)); ++ break; ++ } ++ }; ++ if let Err(err) = self.add_line(Some(path.to_path_buf()), &line) { ++ errs.push(err.tagged(path, lineno)); ++ } ++ } ++ errs.into_error_option() ++ } ++ ++ /// Add each glob line from the string given. ++ /// ++ /// If this string came from a particular `gitignore` file, then its path ++ /// should be provided here. ++ /// ++ /// The string given should be formatted as a `gitignore` file. 
++ #[cfg(test)] ++ fn add_str( ++ &mut self, ++ from: Option, ++ gitignore: &str, ++ ) -> Result<&mut GitignoreBuilder, Error> { ++ for line in gitignore.lines() { ++ try!(self.add_line(from.clone(), line)); ++ } ++ Ok(self) ++ } ++ ++ /// Add a line from a gitignore file to this builder. ++ /// ++ /// If this line came from a particular `gitignore` file, then its path ++ /// should be provided here. ++ /// ++ /// If the line could not be parsed as a glob, then an error is returned. ++ pub fn add_line( ++ &mut self, ++ from: Option, ++ mut line: &str, ++ ) -> Result<&mut GitignoreBuilder, Error> { ++ if line.starts_with("#") { ++ return Ok(self); ++ } ++ if !line.ends_with("\\ ") { ++ line = line.trim_right(); ++ } ++ if line.is_empty() { ++ return Ok(self); ++ } ++ let mut glob = Glob { ++ from: from, ++ original: line.to_string(), ++ actual: String::new(), ++ is_whitelist: false, ++ is_only_dir: false, ++ }; ++ let mut literal_separator = false; ++ let has_slash = line.chars().any(|c| c == '/'); ++ let mut is_absolute = false; ++ if line.starts_with("\\!") || line.starts_with("\\#") { ++ line = &line[1..]; ++ is_absolute = line.chars().nth(0) == Some('/'); ++ } else { ++ if line.starts_with("!") { ++ glob.is_whitelist = true; ++ line = &line[1..]; ++ } ++ if line.starts_with("/") { ++ // `man gitignore` says that if a glob starts with a slash, ++ // then the glob can only match the beginning of a path ++ // (relative to the location of gitignore). We achieve this by ++ // simply banning wildcards from matching /. ++ literal_separator = true; ++ line = &line[1..]; ++ is_absolute = true; ++ } ++ } ++ // If it ends with a slash, then this should only match directories, ++ // but the slash should otherwise not be used while globbing. 
++ if let Some((i, c)) = line.char_indices().rev().nth(0) { ++ if c == '/' { ++ glob.is_only_dir = true; ++ line = &line[..i]; ++ } ++ } ++ // If there is a literal slash, then we note that so that globbing ++ // doesn't let wildcards match slashes. ++ glob.actual = line.to_string(); ++ if has_slash { ++ literal_separator = true; ++ } ++ // If there was a leading slash, then this is a glob that must ++ // match the entire path name. Otherwise, we should let it match ++ // anywhere, so use a **/ prefix. ++ if !is_absolute { ++ // ... but only if we don't already have a **/ prefix. ++ if !glob.actual.starts_with("**/") { ++ glob.actual = format!("**/{}", glob.actual); ++ } ++ } ++ // If the glob ends with `/**`, then we should only match everything ++ // inside a directory, but not the directory itself. Standard globs ++ // will match the directory. So we add `/*` to force the issue. ++ if glob.actual.ends_with("/**") { ++ glob.actual = format!("{}/*", glob.actual); ++ } ++ let parsed = try!( ++ GlobBuilder::new(&glob.actual) ++ .literal_separator(literal_separator) ++ .case_insensitive(self.case_insensitive) ++ .build() ++ .map_err(|err| { ++ Error::Glob { ++ glob: Some(glob.original.clone()), ++ err: err.kind().to_string(), ++ } ++ })); ++ self.builder.add(parsed); ++ self.globs.push(glob); ++ Ok(self) ++ } ++ ++ /// Toggle whether the globs should be matched case insensitively or not. ++ /// ++ /// This is disabled by default. ++ pub fn case_insensitive( ++ &mut self, yes: bool ++ ) -> Result<&mut GitignoreBuilder, Error> { ++ self.case_insensitive = yes; ++ Ok(self) ++ } ++} ++ ++/// Return the file path of the current environment's global gitignore file. ++/// ++/// Note that the file path returned may not exist. ++fn gitconfig_excludes_path() -> Option { ++ gitconfig_contents() ++ .and_then(|data| parse_excludes_file(&data)) ++ .or_else(excludes_file_default) ++} ++ ++/// Returns the file contents of git's global config file, if one exists. 
++fn gitconfig_contents() -> Option> { ++ let home = match env::var_os("HOME") { ++ None => return None, ++ Some(home) => PathBuf::from(home), ++ }; ++ let mut file = match File::open(home.join(".gitconfig")) { ++ Err(_) => return None, ++ Ok(file) => io::BufReader::new(file), ++ }; ++ let mut contents = vec![]; ++ file.read_to_end(&mut contents).ok().map(|_| contents) ++} ++ ++/// Returns the default file path for a global .gitignore file. ++/// ++/// Specifically, this respects XDG_CONFIG_HOME. ++fn excludes_file_default() -> Option { ++ env::var_os("XDG_CONFIG_HOME") ++ .and_then(|x| if x.is_empty() { None } else { Some(PathBuf::from(x)) }) ++ .or_else(|| env::home_dir().map(|p| p.join(".config"))) ++ .map(|x| x.join("git/ignore")) ++} ++ ++/// Extract git's `core.excludesfile` config setting from the raw file contents ++/// given. ++fn parse_excludes_file(data: &[u8]) -> Option { ++ // N.B. This is the lazy approach, and isn't technically correct, but ++ // probably works in more circumstances. I guess we would ideally have ++ // a full INI parser. Yuck. ++ lazy_static! { ++ static ref RE: Regex = Regex::new( ++ r"(?ium)^\s*excludesfile\s*=\s*(.+)\s*$").unwrap(); ++ }; ++ let caps = match RE.captures(data) { ++ None => return None, ++ Some(caps) => caps, ++ }; ++ str::from_utf8(&caps[1]).ok().map(|s| PathBuf::from(expand_tilde(s))) ++} ++ ++/// Expands ~ in file paths to the value of $HOME. ++fn expand_tilde(path: &str) -> String { ++ let home = match env::var("HOME") { ++ Err(_) => return path.to_string(), ++ Ok(home) => home, ++ }; ++ path.replace("~", &home) ++} ++ ++#[cfg(test)] ++mod tests { ++ use std::path::Path; ++ use super::{Gitignore, GitignoreBuilder}; ++ ++ fn gi_from_str>(root: P, s: &str) -> Gitignore { ++ let mut builder = GitignoreBuilder::new(root); ++ builder.add_str(None, s).unwrap(); ++ builder.build().unwrap() ++ } ++ ++ macro_rules! 
ignored { ++ ($name:ident, $root:expr, $gi:expr, $path:expr) => { ++ ignored!($name, $root, $gi, $path, false); ++ }; ++ ($name:ident, $root:expr, $gi:expr, $path:expr, $is_dir:expr) => { ++ #[test] ++ fn $name() { ++ let gi = gi_from_str($root, $gi); ++ assert!(gi.matched($path, $is_dir).is_ignore()); ++ } ++ }; ++ } ++ ++ macro_rules! not_ignored { ++ ($name:ident, $root:expr, $gi:expr, $path:expr) => { ++ not_ignored!($name, $root, $gi, $path, false); ++ }; ++ ($name:ident, $root:expr, $gi:expr, $path:expr, $is_dir:expr) => { ++ #[test] ++ fn $name() { ++ let gi = gi_from_str($root, $gi); ++ assert!(!gi.matched($path, $is_dir).is_ignore()); ++ } ++ }; ++ } ++ ++ const ROOT: &'static str = "/home/foobar/rust/rg"; ++ ++ ignored!(ig1, ROOT, "months", "months"); ++ ignored!(ig2, ROOT, "*.lock", "Cargo.lock"); ++ ignored!(ig3, ROOT, "*.rs", "src/main.rs"); ++ ignored!(ig4, ROOT, "src/*.rs", "src/main.rs"); ++ ignored!(ig5, ROOT, "/*.c", "cat-file.c"); ++ ignored!(ig6, ROOT, "/src/*.rs", "src/main.rs"); ++ ignored!(ig7, ROOT, "!src/main.rs\n*.rs", "src/main.rs"); ++ ignored!(ig8, ROOT, "foo/", "foo", true); ++ ignored!(ig9, ROOT, "**/foo", "foo"); ++ ignored!(ig10, ROOT, "**/foo", "src/foo"); ++ ignored!(ig11, ROOT, "**/foo/**", "src/foo/bar"); ++ ignored!(ig12, ROOT, "**/foo/**", "wat/src/foo/bar/baz"); ++ ignored!(ig13, ROOT, "**/foo/bar", "foo/bar"); ++ ignored!(ig14, ROOT, "**/foo/bar", "src/foo/bar"); ++ ignored!(ig15, ROOT, "abc/**", "abc/x"); ++ ignored!(ig16, ROOT, "abc/**", "abc/x/y"); ++ ignored!(ig17, ROOT, "abc/**", "abc/x/y/z"); ++ ignored!(ig18, ROOT, "a/**/b", "a/b"); ++ ignored!(ig19, ROOT, "a/**/b", "a/x/b"); ++ ignored!(ig20, ROOT, "a/**/b", "a/x/y/b"); ++ ignored!(ig21, ROOT, r"\!xy", "!xy"); ++ ignored!(ig22, ROOT, r"\#foo", "#foo"); ++ ignored!(ig23, ROOT, "foo", "./foo"); ++ ignored!(ig24, ROOT, "target", "grep/target"); ++ ignored!(ig25, ROOT, "Cargo.lock", "./tabwriter-bin/Cargo.lock"); ++ ignored!(ig26, ROOT, "/foo/bar/baz", "./foo/bar/baz"); 
++ ignored!(ig27, ROOT, "foo/", "xyz/foo", true); ++ ignored!(ig28, ROOT, "src/*.rs", "src/grep/src/main.rs"); ++ ignored!(ig29, "./src", "/llvm/", "./src/llvm", true); ++ ignored!(ig30, ROOT, "node_modules/ ", "node_modules", true); ++ ++ not_ignored!(ignot1, ROOT, "amonths", "months"); ++ not_ignored!(ignot2, ROOT, "monthsa", "months"); ++ not_ignored!(ignot3, ROOT, "/src/*.rs", "src/grep/src/main.rs"); ++ not_ignored!(ignot4, ROOT, "/*.c", "mozilla-sha1/sha1.c"); ++ not_ignored!(ignot5, ROOT, "/src/*.rs", "src/grep/src/main.rs"); ++ not_ignored!(ignot6, ROOT, "*.rs\n!src/main.rs", "src/main.rs"); ++ not_ignored!(ignot7, ROOT, "foo/", "foo", false); ++ not_ignored!(ignot8, ROOT, "**/foo/**", "wat/src/afoo/bar/baz"); ++ not_ignored!(ignot9, ROOT, "**/foo/**", "wat/src/fooa/bar/baz"); ++ not_ignored!(ignot10, ROOT, "**/foo/bar", "foo/src/bar"); ++ not_ignored!(ignot11, ROOT, "#foo", "#foo"); ++ not_ignored!(ignot12, ROOT, "\n\n\n", "foo"); ++ not_ignored!(ignot13, ROOT, "foo/**", "foo", true); ++ not_ignored!( ++ ignot14, "./third_party/protobuf", "m4/ltoptions.m4", ++ "./third_party/protobuf/csharp/src/packages/repositories.config"); ++ not_ignored!(ignot15, ROOT, "!/bar", "foo/bar"); ++ ++ fn bytes(s: &str) -> Vec { ++ s.to_string().into_bytes() ++ } ++ ++ fn path_string>(path: P) -> String { ++ path.as_ref().to_str().unwrap().to_string() ++ } ++ ++ #[test] ++ fn parse_excludes_file1() { ++ let data = bytes("[core]\nexcludesFile = /foo/bar"); ++ let got = super::parse_excludes_file(&data).unwrap(); ++ assert_eq!(path_string(got), "/foo/bar"); ++ } ++ ++ #[test] ++ fn parse_excludes_file2() { ++ let data = bytes("[core]\nexcludesFile = ~/foo/bar"); ++ let got = super::parse_excludes_file(&data).unwrap(); ++ assert_eq!(path_string(got), super::expand_tilde("~/foo/bar")); ++ } ++ ++ #[test] ++ fn parse_excludes_file3() { ++ let data = bytes("[core]\nexcludeFile = /foo/bar"); ++ assert!(super::parse_excludes_file(&data).is_none()); ++ } ++ ++ // See: 
https://github.com/BurntSushi/ripgrep/issues/106 ++ #[test] ++ fn regression_106() { ++ gi_from_str("/", " "); ++ } ++ ++ #[test] ++ fn case_insensitive() { ++ let gi = GitignoreBuilder::new(ROOT) ++ .case_insensitive(true).unwrap() ++ .add_str(None, "*.html").unwrap() ++ .build().unwrap(); ++ assert!(gi.matched("foo.html", false).is_ignore()); ++ assert!(gi.matched("foo.HTML", false).is_ignore()); ++ assert!(!gi.matched("foo.htm", false).is_ignore()); ++ assert!(!gi.matched("foo.HTM", false).is_ignore()); ++ } ++ ++ ignored!(cs1, ROOT, "*.html", "foo.html"); ++ not_ignored!(cs2, ROOT, "*.html", "foo.HTML"); ++ not_ignored!(cs3, ROOT, "*.html", "foo.htm"); ++ not_ignored!(cs4, ROOT, "*.html", "foo.HTM"); ++} diff --cc vendor/ignore-0.2.2/src/lib.rs index 000000000,000000000..d053014cc new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/src/lib.rs @@@ -1,0 -1,0 +1,404 @@@ ++/*! ++The ignore crate provides a fast recursive directory iterator that respects ++various filters such as globs, file types and `.gitignore` files. The precise ++matching rules and precedence is explained in the documentation for ++`WalkBuilder`. ++ ++Secondarily, this crate exposes gitignore and file type matchers for use cases ++that demand more fine-grained control. ++ ++# Example ++ ++This example shows the most basic usage of this crate. This code will ++recursively traverse the current directory while automatically filtering out ++files and directories according to ignore globs found in files like ++`.ignore` and `.gitignore`: ++ ++ ++```rust,no_run ++use ignore::Walk; ++ ++for result in Walk::new("./") { ++ // Each item yielded by the iterator is either a directory entry or an ++ // error, so either print the path or the error. ++ match result { ++ Ok(entry) => println!("{}", entry.path().display()), ++ Err(err) => println!("ERROR: {}", err), ++ } ++} ++``` ++ ++# Example: advanced ++ ++By default, the recursive directory iterator will ignore hidden files and ++directories. 
This can be disabled by building the iterator with `WalkBuilder`: ++ ++```rust,no_run ++use ignore::WalkBuilder; ++ ++for result in WalkBuilder::new("./").hidden(false).build() { ++ println!("{:?}", result); ++} ++``` ++ ++See the documentation for `WalkBuilder` for many other options. ++*/ ++ ++#![deny(missing_docs)] ++ ++extern crate crossbeam; ++extern crate globset; ++#[macro_use] ++extern crate lazy_static; ++#[macro_use] ++extern crate log; ++extern crate memchr; ++extern crate regex; ++#[cfg(test)] ++extern crate tempdir; ++extern crate thread_local; ++extern crate walkdir; ++ ++use std::error; ++use std::fmt; ++use std::io; ++use std::path::{Path, PathBuf}; ++ ++pub use walk::{DirEntry, Walk, WalkBuilder, WalkParallel, WalkState}; ++ ++mod dir; ++pub mod gitignore; ++mod pathutil; ++pub mod overrides; ++pub mod types; ++mod walk; ++ ++/// Represents an error that can occur when parsing a gitignore file. ++#[derive(Debug)] ++pub enum Error { ++ /// A collection of "soft" errors. These occur when adding an ignore ++ /// file partially succeeded. ++ Partial(Vec), ++ /// An error associated with a specific line number. ++ WithLineNumber { ++ /// The line number. ++ line: u64, ++ /// The underlying error. ++ err: Box, ++ }, ++ /// An error associated with a particular file path. ++ WithPath { ++ /// The file path. ++ path: PathBuf, ++ /// The underlying error. ++ err: Box, ++ }, ++ /// An error associated with a particular directory depth when recursively ++ /// walking a directory. ++ WithDepth { ++ /// The directory depth. ++ depth: usize, ++ /// The underlying error. ++ err: Box, ++ }, ++ /// An error that occurs when a file loop is detected when traversing ++ /// symbolic links. ++ Loop { ++ /// The ancestor file path in the loop. ++ ancestor: PathBuf, ++ /// The child file path in the loop. ++ child: PathBuf, ++ }, ++ /// An error that occurs when doing I/O, such as reading an ignore file. 
++ Io(io::Error), ++ /// An error that occurs when trying to parse a glob. ++ Glob { ++ /// The original glob that caused this error. This glob, when ++ /// available, always corresponds to the glob provided by an end user. ++ /// e.g., It is the glob as writtein in a `.gitignore` file. ++ /// ++ /// (This glob may be distinct from the glob that is actually ++ /// compiled, after accounting for `gitignore` semantics.) ++ glob: Option, ++ /// The underlying glob error as a string. ++ err: String, ++ }, ++ /// A type selection for a file type that is not defined. ++ UnrecognizedFileType(String), ++ /// A user specified file type definition could not be parsed. ++ InvalidDefinition, ++} ++ ++impl Error { ++ /// Returns true if this is a partial error. ++ /// ++ /// A partial error occurs when only some operations failed while others ++ /// may have succeeded. For example, an ignore file may contain an invalid ++ /// glob among otherwise valid globs. ++ pub fn is_partial(&self) -> bool { ++ match *self { ++ Error::Partial(_) => true, ++ Error::WithLineNumber { ref err, .. } => err.is_partial(), ++ Error::WithPath { ref err, .. } => err.is_partial(), ++ Error::WithDepth { ref err, .. } => err.is_partial(), ++ _ => false, ++ } ++ } ++ ++ /// Returns true if this error is exclusively an I/O error. ++ pub fn is_io(&self) -> bool { ++ match *self { ++ Error::Partial(ref errs) => errs.len() == 1 && errs[0].is_io(), ++ Error::WithLineNumber { ref err, .. } => err.is_io(), ++ Error::WithPath { ref err, .. } => err.is_io(), ++ Error::WithDepth { ref err, .. } => err.is_io(), ++ Error::Loop { .. } => false, ++ Error::Io(_) => true, ++ Error::Glob { .. } => false, ++ Error::UnrecognizedFileType(_) => false, ++ Error::InvalidDefinition => false, ++ } ++ } ++ ++ /// Returns a depth associated with recursively walking a directory (if ++ /// this error was generated from a recursive directory iterator). 
++ pub fn depth(&self) -> Option { ++ match *self { ++ Error::WithPath { ref err, .. } => err.depth(), ++ Error::WithDepth { depth, .. } => Some(depth), ++ _ => None, ++ } ++ } ++ ++ /// Turn an error into a tagged error with the given file path. ++ fn with_path>(self, path: P) -> Error { ++ Error::WithPath { ++ path: path.as_ref().to_path_buf(), ++ err: Box::new(self), ++ } ++ } ++ ++ /// Turn an error into a tagged error with the given depth. ++ fn with_depth(self, depth: usize) -> Error { ++ Error::WithDepth { ++ depth: depth, ++ err: Box::new(self), ++ } ++ } ++ ++ /// Turn an error into a tagged error with the given file path and line ++ /// number. If path is empty, then it is omitted from the error. ++ fn tagged>(self, path: P, lineno: u64) -> Error { ++ let errline = Error::WithLineNumber { ++ line: lineno, ++ err: Box::new(self), ++ }; ++ if path.as_ref().as_os_str().is_empty() { ++ return errline; ++ } ++ errline.with_path(path) ++ } ++} ++ ++impl error::Error for Error { ++ fn description(&self) -> &str { ++ match *self { ++ Error::Partial(_) => "partial error", ++ Error::WithLineNumber { ref err, .. } => err.description(), ++ Error::WithPath { ref err, .. } => err.description(), ++ Error::WithDepth { ref err, .. } => err.description(), ++ Error::Loop { .. } => "file system loop found", ++ Error::Io(ref err) => err.description(), ++ Error::Glob { ref err, .. 
} => err, ++ Error::UnrecognizedFileType(_) => "unrecognized file type", ++ Error::InvalidDefinition => "invalid definition", ++ } ++ } ++} ++ ++impl fmt::Display for Error { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ match *self { ++ Error::Partial(ref errs) => { ++ let msgs: Vec = ++ errs.iter().map(|err| err.to_string()).collect(); ++ write!(f, "{}", msgs.join("\n")) ++ } ++ Error::WithLineNumber { line, ref err } => { ++ write!(f, "line {}: {}", line, err) ++ } ++ Error::WithPath { ref path, ref err } => { ++ write!(f, "{}: {}", path.display(), err) ++ } ++ Error::WithDepth { ref err, .. } => err.fmt(f), ++ Error::Loop { ref ancestor, ref child } => { ++ write!(f, "File system loop found: \ ++ {} points to an ancestor {}", ++ child.display(), ancestor.display()) ++ } ++ Error::Io(ref err) => err.fmt(f), ++ Error::Glob { glob: None, ref err } => write!(f, "{}", err), ++ Error::Glob { glob: Some(ref glob), ref err } => { ++ write!(f, "error parsing glob '{}': {}", glob, err) ++ } ++ Error::UnrecognizedFileType(ref ty) => { ++ write!(f, "unrecognized file type: {}", ty) ++ } ++ Error::InvalidDefinition => { ++ write!(f, "invalid definition (format is type:glob, e.g., \ ++ html:*.html)") ++ } ++ } ++ } ++} ++ ++impl From for Error { ++ fn from(err: io::Error) -> Error { ++ Error::Io(err) ++ } ++} ++ ++impl From for Error { ++ fn from(err: walkdir::Error) -> Error { ++ let depth = err.depth(); ++ if let (Some(anc), Some(child)) = (err.loop_ancestor(), err.path()) { ++ return Error::WithDepth { ++ depth: depth, ++ err: Box::new(Error::Loop { ++ ancestor: anc.to_path_buf(), ++ child: child.to_path_buf(), ++ }), ++ }; ++ } ++ let path = err.path().map(|p| p.to_path_buf()); ++ let mut ig_err = Error::Io(io::Error::from(err)); ++ if let Some(path) = path { ++ ig_err = Error::WithPath { ++ path: path, ++ err: Box::new(ig_err), ++ }; ++ } ++ ig_err ++ } ++} ++ ++#[derive(Debug, Default)] ++struct PartialErrorBuilder(Vec); ++ ++impl PartialErrorBuilder { 
++ fn push(&mut self, err: Error) { ++ self.0.push(err); ++ } ++ ++ fn push_ignore_io(&mut self, err: Error) { ++ if !err.is_io() { ++ self.push(err); ++ } ++ } ++ ++ fn maybe_push(&mut self, err: Option) { ++ if let Some(err) = err { ++ self.push(err); ++ } ++ } ++ ++ fn maybe_push_ignore_io(&mut self, err: Option) { ++ if let Some(err) = err { ++ self.push_ignore_io(err); ++ } ++ } ++ ++ fn into_error_option(mut self) -> Option { ++ if self.0.is_empty() { ++ None ++ } else if self.0.len() == 1 { ++ Some(self.0.pop().unwrap()) ++ } else { ++ Some(Error::Partial(self.0)) ++ } ++ } ++} ++ ++/// The result of a glob match. ++/// ++/// The type parameter `T` typically refers to a type that provides more ++/// information about a particular match. For example, it might identify ++/// the specific gitignore file and the specific glob pattern that caused ++/// the match. ++#[derive(Clone, Debug)] ++pub enum Match { ++ /// The path didn't match any glob. ++ None, ++ /// The highest precedent glob matched indicates the path should be ++ /// ignored. ++ Ignore(T), ++ /// The highest precedent glob matched indicates the path should be ++ /// whitelisted. ++ Whitelist(T), ++} ++ ++impl Match { ++ /// Returns true if the match result didn't match any globs. ++ pub fn is_none(&self) -> bool { ++ match *self { ++ Match::None => true, ++ Match::Ignore(_) | Match::Whitelist(_) => false, ++ } ++ } ++ ++ /// Returns true if the match result implies the path should be ignored. ++ pub fn is_ignore(&self) -> bool { ++ match *self { ++ Match::Ignore(_) => true, ++ Match::None | Match::Whitelist(_) => false, ++ } ++ } ++ ++ /// Returns true if the match result implies the path should be ++ /// whitelisted. ++ pub fn is_whitelist(&self) -> bool { ++ match *self { ++ Match::Whitelist(_) => true, ++ Match::None | Match::Ignore(_) => false, ++ } ++ } ++ ++ /// Inverts the match so that `Ignore` becomes `Whitelist` and ++ /// `Whitelist` becomes `Ignore`. A non-match remains the same. 
++ pub fn invert(self) -> Match { ++ match self { ++ Match::None => Match::None, ++ Match::Ignore(t) => Match::Whitelist(t), ++ Match::Whitelist(t) => Match::Ignore(t), ++ } ++ } ++ ++ /// Return the value inside this match if it exists. ++ pub fn inner(&self) -> Option<&T> { ++ match *self { ++ Match::None => None, ++ Match::Ignore(ref t) => Some(t), ++ Match::Whitelist(ref t) => Some(t), ++ } ++ } ++ ++ /// Apply the given function to the value inside this match. ++ /// ++ /// If the match has no value, then return the match unchanged. ++ pub fn map U>(self, f: F) -> Match { ++ match self { ++ Match::None => Match::None, ++ Match::Ignore(t) => Match::Ignore(f(t)), ++ Match::Whitelist(t) => Match::Whitelist(f(t)), ++ } ++ } ++ ++ /// Return the match if it is not none. Otherwise, return other. ++ pub fn or(self, other: Self) -> Self { ++ if self.is_none() { ++ other ++ } else { ++ self ++ } ++ } ++} diff --cc vendor/ignore-0.2.2/src/overrides.rs index 000000000,000000000..453066f93 new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/src/overrides.rs @@@ -1,0 -1,0 +1,256 @@@ ++/*! ++The overrides module provides a way to specify a set of override globs. ++This provides functionality similar to `--include` or `--exclude` in command ++line tools. ++*/ ++ ++use std::path::Path; ++ ++use gitignore::{self, Gitignore, GitignoreBuilder}; ++use {Error, Match}; ++ ++/// Glob represents a single glob in an override matcher. ++/// ++/// This is used to report information about the highest precedent glob ++/// that matched. ++/// ++/// Note that not all matches necessarily correspond to a specific glob. For ++/// example, if there are one or more whitelist globs and a file path doesn't ++/// match any glob in the set, then the file path is considered to be ignored. ++/// ++/// The lifetime `'a` refers to the lifetime of the matcher that produced ++/// this glob. 
++#[derive(Clone, Debug)] ++pub struct Glob<'a>(GlobInner<'a>); ++ ++#[derive(Clone, Debug)] ++enum GlobInner<'a> { ++ /// No glob matched, but the file path should still be ignored. ++ UnmatchedIgnore, ++ /// A glob matched. ++ Matched(&'a gitignore::Glob), ++} ++ ++impl<'a> Glob<'a> { ++ fn unmatched() -> Glob<'a> { ++ Glob(GlobInner::UnmatchedIgnore) ++ } ++} ++ ++/// Manages a set of overrides provided explicitly by the end user. ++#[derive(Clone, Debug)] ++pub struct Override(Gitignore); ++ ++impl Override { ++ /// Returns an empty matcher that never matches any file path. ++ pub fn empty() -> Override { ++ Override(Gitignore::empty()) ++ } ++ ++ /// Returns the directory of this override set. ++ /// ++ /// All matches are done relative to this path. ++ pub fn path(&self) -> &Path { ++ self.0.path() ++ } ++ ++ /// Returns true if and only if this matcher is empty. ++ /// ++ /// When a matcher is empty, it will never match any file path. ++ pub fn is_empty(&self) -> bool { ++ self.0.is_empty() ++ } ++ ++ /// Returns the total number of ignore globs. ++ pub fn num_ignores(&self) -> u64 { ++ self.0.num_whitelists() ++ } ++ ++ /// Returns the total number of whitelisted globs. ++ pub fn num_whitelists(&self) -> u64 { ++ self.0.num_ignores() ++ } ++ ++ /// Returns whether the given file path matched a pattern in this override ++ /// matcher. ++ /// ++ /// `is_dir` should be true if the path refers to a directory and false ++ /// otherwise. ++ /// ++ /// If there are no overrides, then this always returns `Match::None`. ++ /// ++ /// If there is at least one whitelist override and `is_dir` is false, then ++ /// this never returns `Match::None`, since non-matches are interpreted as ++ /// ignored. ++ /// ++ /// The given path is matched to the globs relative to the path given ++ /// when building the override matcher. Specifically, before matching ++ /// `path`, its prefix (as determined by a common suffix of the directory ++ /// given) is stripped. 
If there is no common suffix/prefix overlap, then ++ /// `path` is assumed to reside in the same directory as the root path for ++ /// this set of overrides. ++ pub fn matched<'a, P: AsRef>( ++ &'a self, ++ path: P, ++ is_dir: bool, ++ ) -> Match> { ++ if self.is_empty() { ++ return Match::None; ++ } ++ let mat = self.0.matched(path, is_dir).invert(); ++ if mat.is_none() && self.num_whitelists() > 0 && !is_dir { ++ return Match::Ignore(Glob::unmatched()); ++ } ++ mat.map(move |giglob| Glob(GlobInner::Matched(giglob))) ++ } ++} ++ ++/// Builds a matcher for a set of glob overrides. ++pub struct OverrideBuilder { ++ builder: GitignoreBuilder, ++} ++ ++impl OverrideBuilder { ++ /// Create a new override builder. ++ /// ++ /// Matching is done relative to the directory path provided. ++ pub fn new>(path: P) -> OverrideBuilder { ++ OverrideBuilder { ++ builder: GitignoreBuilder::new(path), ++ } ++ } ++ ++ /// Builds a new override matcher from the globs added so far. ++ /// ++ /// Once a matcher is built, no new globs can be added to it. ++ pub fn build(&self) -> Result { ++ Ok(Override(try!(self.builder.build()))) ++ } ++ ++ /// Add a glob to the set of overrides. ++ /// ++ /// Globs provided here have precisely the same semantics as a single ++ /// line in a `gitignore` file, where the meaning of `!` is inverted: ++ /// namely, `!` at the beginning of a glob will ignore a file. Without `!`, ++ /// all matches of the glob provided are treated as whitelist matches. ++ pub fn add(&mut self, glob: &str) -> Result<&mut OverrideBuilder, Error> { ++ try!(self.builder.add_line(None, glob)); ++ Ok(self) ++ } ++ ++ /// Toggle whether the globs should be matched case insensitively or not. ++ /// ++ /// This is disabled by default. 
++ pub fn case_insensitive( ++ &mut self, yes: bool ++ ) -> Result<&mut OverrideBuilder, Error> { ++ try!(self.builder.case_insensitive(yes)); ++ Ok(self) ++ } ++} ++ ++#[cfg(test)] ++mod tests { ++ use super::{Override, OverrideBuilder}; ++ ++ const ROOT: &'static str = "/home/andrew/foo"; ++ ++ fn ov(globs: &[&str]) -> Override { ++ let mut builder = OverrideBuilder::new(ROOT); ++ for glob in globs { ++ builder.add(glob).unwrap(); ++ } ++ builder.build().unwrap() ++ } ++ ++ #[test] ++ fn empty() { ++ let ov = ov(&[]); ++ assert!(ov.matched("a.foo", false).is_none()); ++ assert!(ov.matched("a", false).is_none()); ++ assert!(ov.matched("", false).is_none()); ++ } ++ ++ #[test] ++ fn simple() { ++ let ov = ov(&["*.foo", "!*.bar"]); ++ assert!(ov.matched("a.foo", false).is_whitelist()); ++ assert!(ov.matched("a.foo", true).is_whitelist()); ++ assert!(ov.matched("a.rs", false).is_ignore()); ++ assert!(ov.matched("a.rs", true).is_none()); ++ assert!(ov.matched("a.bar", false).is_ignore()); ++ assert!(ov.matched("a.bar", true).is_ignore()); ++ } ++ ++ #[test] ++ fn only_ignores() { ++ let ov = ov(&["!*.bar"]); ++ assert!(ov.matched("a.rs", false).is_none()); ++ assert!(ov.matched("a.rs", true).is_none()); ++ assert!(ov.matched("a.bar", false).is_ignore()); ++ assert!(ov.matched("a.bar", true).is_ignore()); ++ } ++ ++ #[test] ++ fn precedence() { ++ let ov = ov(&["*.foo", "!*.bar.foo"]); ++ assert!(ov.matched("a.foo", false).is_whitelist()); ++ assert!(ov.matched("a.baz", false).is_ignore()); ++ assert!(ov.matched("a.bar.foo", false).is_ignore()); ++ } ++ ++ #[test] ++ fn gitignore() { ++ let ov = ov(&["/foo", "bar/*.rs", "baz/**"]); ++ assert!(ov.matched("bar/wat/lib.rs", false).is_ignore()); ++ assert!(ov.matched("wat/bar/lib.rs", false).is_whitelist()); ++ assert!(ov.matched("foo", false).is_whitelist()); ++ assert!(ov.matched("wat/foo", false).is_ignore()); ++ assert!(ov.matched("baz", false).is_ignore()); ++ assert!(ov.matched("baz/a", false).is_whitelist()); ++ 
assert!(ov.matched("baz/a/b", false).is_whitelist()); ++ } ++ ++ #[test] ++ fn allow_directories() { ++ // This tests that directories are NOT ignored when they are unmatched. ++ let ov = ov(&["*.rs"]); ++ assert!(ov.matched("foo.rs", false).is_whitelist()); ++ assert!(ov.matched("foo.c", false).is_ignore()); ++ assert!(ov.matched("foo", false).is_ignore()); ++ assert!(ov.matched("foo", true).is_none()); ++ assert!(ov.matched("src/foo.rs", false).is_whitelist()); ++ assert!(ov.matched("src/foo.c", false).is_ignore()); ++ assert!(ov.matched("src/foo", false).is_ignore()); ++ assert!(ov.matched("src/foo", true).is_none()); ++ } ++ ++ #[test] ++ fn absolute_path() { ++ let ov = ov(&["!/bar"]); ++ assert!(ov.matched("./foo/bar", false).is_none()); ++ } ++ ++ #[test] ++ fn case_insensitive() { ++ let ov = OverrideBuilder::new(ROOT) ++ .case_insensitive(true).unwrap() ++ .add("*.html").unwrap() ++ .build().unwrap(); ++ assert!(ov.matched("foo.html", false).is_whitelist()); ++ assert!(ov.matched("foo.HTML", false).is_whitelist()); ++ assert!(ov.matched("foo.htm", false).is_ignore()); ++ assert!(ov.matched("foo.HTM", false).is_ignore()); ++ } ++ ++ #[test] ++ fn default_case_sensitive() { ++ let ov = OverrideBuilder::new(ROOT) ++ .add("*.html").unwrap() ++ .build().unwrap(); ++ assert!(ov.matched("foo.html", false).is_whitelist()); ++ assert!(ov.matched("foo.HTML", false).is_ignore()); ++ assert!(ov.matched("foo.htm", false).is_ignore()); ++ assert!(ov.matched("foo.HTM", false).is_ignore()); ++ } ++} diff --cc vendor/ignore-0.2.2/src/pathutil.rs index 000000000,000000000..bfd43de3e new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/src/pathutil.rs @@@ -1,0 -1,0 +1,108 @@@ ++use std::ffi::OsStr; ++use std::path::Path; ++ ++/// Returns true if and only if this file path is considered to be hidden. 
++#[cfg(unix)] ++pub fn is_hidden>(path: P) -> bool { ++ use std::os::unix::ffi::OsStrExt; ++ ++ if let Some(name) = file_name(path.as_ref()) { ++ name.as_bytes().get(0) == Some(&b'.') ++ } else { ++ false ++ } ++} ++ ++/// Returns true if and only if this file path is considered to be hidden. ++#[cfg(not(unix))] ++pub fn is_hidden>(path: P) -> bool { ++ if let Some(name) = file_name(path.as_ref()) { ++ name.to_str().map(|s| s.starts_with(".")).unwrap_or(false) ++ } else { ++ false ++ } ++} ++ ++/// Strip `prefix` from the `path` and return the remainder. ++/// ++/// If `path` doesn't have a prefix `prefix`, then return `None`. ++#[cfg(unix)] ++pub fn strip_prefix<'a, P: AsRef + ?Sized>( ++ prefix: &'a P, ++ path: &'a Path, ++) -> Option<&'a Path> { ++ use std::os::unix::ffi::OsStrExt; ++ ++ let prefix = prefix.as_ref().as_os_str().as_bytes(); ++ let path = path.as_os_str().as_bytes(); ++ if prefix.len() > path.len() || prefix != &path[0..prefix.len()] { ++ None ++ } else { ++ Some(&Path::new(OsStr::from_bytes(&path[prefix.len()..]))) ++ } ++} ++ ++/// Strip `prefix` from the `path` and return the remainder. ++/// ++/// If `path` doesn't have a prefix `prefix`, then return `None`. ++#[cfg(not(unix))] ++pub fn strip_prefix<'a, P: AsRef + ?Sized>( ++ prefix: &'a P, ++ path: &'a Path, ++) -> Option<&'a Path> { ++ path.strip_prefix(prefix).ok() ++} ++ ++/// Returns true if this file path is just a file name. i.e., Its parent is ++/// the empty string. ++#[cfg(unix)] ++pub fn is_file_name>(path: P) -> bool { ++ use std::os::unix::ffi::OsStrExt; ++ use memchr::memchr; ++ ++ let path = path.as_ref().as_os_str().as_bytes(); ++ memchr(b'/', path).is_none() ++} ++ ++/// Returns true if this file path is just a file name. i.e., Its parent is ++/// the empty string. ++#[cfg(not(unix))] ++pub fn is_file_name>(path: P) -> bool { ++ path.as_ref().parent().map(|p| p.as_os_str().is_empty()).unwrap_or(false) ++} ++ ++/// The final component of the path, if it is a normal file. 
++/// ++/// If the path terminates in ., .., or consists solely of a root of prefix, ++/// file_name will return None. ++#[cfg(unix)] ++pub fn file_name<'a, P: AsRef + ?Sized>( ++ path: &'a P, ++) -> Option<&'a OsStr> { ++ use std::os::unix::ffi::OsStrExt; ++ use memchr::memrchr; ++ ++ let path = path.as_ref().as_os_str().as_bytes(); ++ if path.is_empty() { ++ return None; ++ } else if path.len() == 1 && path[0] == b'.' { ++ return None; ++ } else if path.last() == Some(&b'.') { ++ return None; ++ } else if path.len() >= 2 && &path[path.len() - 2..] == &b".."[..] { ++ return None; ++ } ++ let last_slash = memrchr(b'/', path).map(|i| i + 1).unwrap_or(0); ++ Some(OsStr::from_bytes(&path[last_slash..])) ++} ++ ++/// The final component of the path, if it is a normal file. ++/// ++/// If the path terminates in ., .., or consists solely of a root of prefix, ++/// file_name will return None. ++#[cfg(not(unix))] ++pub fn file_name<'a, P: AsRef + ?Sized>( ++ path: &'a P, ++) -> Option<&'a OsStr> { ++ path.as_ref().file_name() ++} diff --cc vendor/ignore-0.2.2/src/types.rs index 000000000,000000000..791008ffc new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/src/types.rs @@@ -1,0 -1,0 +1,686 @@@ ++/*! ++The types module provides a way of associating globs on file names to file ++types. ++ ++This can be used to match specific types of files. For example, among ++the default file types provided, the Rust file type is defined to be `*.rs` ++with name `rust`. Similarly, the C file type is defined to be `*.{c,h}` with ++name `c`. ++ ++Note that the set of default types may change over time. ++ ++# Example ++ ++This shows how to create and use a simple file type matcher using the default ++file types defined in this crate. 
++ ++``` ++use ignore::types::TypesBuilder; ++ ++let mut builder = TypesBuilder::new(); ++builder.add_defaults(); ++builder.select("rust"); ++let matcher = builder.build().unwrap(); ++ ++assert!(matcher.matched("foo.rs", false).is_whitelist()); ++assert!(matcher.matched("foo.c", false).is_ignore()); ++``` ++ ++# Example: negation ++ ++This is like the previous example, but shows how negating a file type works. ++That is, this will let us match file paths that *don't* correspond to a ++particular file type. ++ ++``` ++use ignore::types::TypesBuilder; ++ ++let mut builder = TypesBuilder::new(); ++builder.add_defaults(); ++builder.negate("c"); ++let matcher = builder.build().unwrap(); ++ ++assert!(matcher.matched("foo.rs", false).is_none()); ++assert!(matcher.matched("foo.c", false).is_ignore()); ++``` ++ ++# Example: custom file type definitions ++ ++This shows how to extend this library default file type definitions with ++your own. ++ ++``` ++use ignore::types::TypesBuilder; ++ ++let mut builder = TypesBuilder::new(); ++builder.add_defaults(); ++builder.add("foo", "*.foo"); ++// Another way of adding a file type definition. ++// This is useful when accepting input from an end user. ++builder.add_def("bar:*.bar"); ++// Note: we only select `foo`, not `bar`. ++builder.select("foo"); ++let matcher = builder.build().unwrap(); ++ ++assert!(matcher.matched("x.foo", false).is_whitelist()); ++// This is ignored because we only selected the `foo` file type. ++assert!(matcher.matched("x.bar", false).is_ignore()); ++``` ++ ++We can also add file type definitions based on other definitions. 
++ ++``` ++use ignore::types::TypesBuilder; ++ ++let mut builder = TypesBuilder::new(); ++builder.add_defaults(); ++builder.add("foo", "*.foo"); ++builder.add_def("bar:include:foo,cpp"); ++builder.select("bar"); ++let matcher = builder.build().unwrap(); ++ ++assert!(matcher.matched("x.foo", false).is_whitelist()); ++assert!(matcher.matched("y.cpp", false).is_whitelist()); ++``` ++*/ ++ ++use std::cell::RefCell; ++use std::collections::HashMap; ++use std::path::Path; ++use std::sync::Arc; ++ ++use globset::{GlobBuilder, GlobSet, GlobSetBuilder}; ++use regex::Regex; ++use thread_local::ThreadLocal; ++ ++use pathutil::file_name; ++use {Error, Match}; ++ ++const DEFAULT_TYPES: &'static [(&'static str, &'static [&'static str])] = &[ ++ ("agda", &["*.agda", "*.lagda"]), ++ ("asciidoc", &["*.adoc", "*.asc", "*.asciidoc"]), ++ ("asm", &["*.asm", "*.s", "*.S"]), ++ ("awk", &["*.awk"]), ++ ("c", &["*.c", "*.h", "*.H"]), ++ ("cabal", &["*.cabal"]), ++ ("cbor", &["*.cbor"]), ++ ("ceylon", &["*.ceylon"]), ++ ("clojure", &["*.clj", "*.cljc", "*.cljs", "*.cljx"]), ++ ("cmake", &["*.cmake", "CMakeLists.txt"]), ++ ("coffeescript", &["*.coffee"]), ++ ("creole", &["*.creole"]), ++ ("config", &["*.config"]), ++ ("cpp", &[ ++ "*.C", "*.cc", "*.cpp", "*.cxx", ++ "*.h", "*.H", "*.hh", "*.hpp", "*.inl", ++ ]), ++ ("crystal", &["Projectfile", "*.cr"]), ++ ("cs", &["*.cs"]), ++ ("csharp", &["*.cs"]), ++ ("cshtml", &["*.cshtml"]), ++ ("css", &["*.css", "*.scss"]), ++ ("cython", &["*.pyx"]), ++ ("dart", &["*.dart"]), ++ ("d", &["*.d"]), ++ ("elisp", &["*.el"]), ++ ("elixir", &["*.ex", "*.eex", "*.exs"]), ++ ("erlang", &["*.erl", "*.hrl"]), ++ ("fish", &["*.fish"]), ++ ("fortran", &[ ++ "*.f", "*.F", "*.f77", "*.F77", "*.pfo", ++ "*.f90", "*.F90", "*.f95", "*.F95", ++ ]), ++ ("fsharp", &["*.fs", "*.fsx", "*.fsi"]), ++ ("go", &["*.go"]), ++ ("groovy", &["*.groovy", "*.gradle"]), ++ ("h", &["*.h", "*.hpp"]), ++ ("hbs", &["*.hbs"]), ++ ("haskell", &["*.hs", "*.lhs"]), ++ ("html", &["*.htm", 
"*.html", "*.ejs"]), ++ ("java", &["*.java"]), ++ ("jinja", &["*.jinja", "*.jinja2"]), ++ ("js", &[ ++ "*.js", "*.jsx", "*.vue", ++ ]), ++ ("json", &["*.json"]), ++ ("jsonl", &["*.jsonl"]), ++ ("julia", &["*.jl"]), ++ ("kotlin", &["*.kt", "*.kts"]), ++ ("less", &["*.less"]), ++ ("lisp", &["*.el", "*.jl", "*.lisp", "*.lsp", "*.sc", "*.scm"]), ++ ("log", &["*.log"]), ++ ("lua", &["*.lua"]), ++ ("m4", &["*.ac", "*.m4"]), ++ ("make", &["gnumakefile", "Gnumakefile", "makefile", "Makefile", "*.mk", "*.mak"]), ++ ("markdown", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]), ++ ("md", &["*.markdown", "*.md", "*.mdown", "*.mkdn"]), ++ ("matlab", &["*.m"]), ++ ("mk", &["mkfile"]), ++ ("ml", &["*.ml"]), ++ ("msbuild", &["*.csproj", "*.fsproj", "*.vcxproj", "*.proj", "*.props", "*.targets"]), ++ ("nim", &["*.nim"]), ++ ("nix", &["*.nix"]), ++ ("objc", &["*.h", "*.m"]), ++ ("objcpp", &["*.h", "*.mm"]), ++ ("ocaml", &["*.ml", "*.mli", "*.mll", "*.mly"]), ++ ("org", &["*.org"]), ++ ("perl", &["*.perl", "*.pl", "*.PL", "*.plh", "*.plx", "*.pm", "*.t"]), ++ ("pdf", &["*.pdf"]), ++ ("php", &["*.php", "*.php3", "*.php4", "*.php5", "*.phtml"]), ++ ("pod", &["*.pod"]), ++ ("ps", &["*.cdxml", "*.ps1", "*.ps1xml", "*.psd1", "*.psm1"]), ++ ("py", &["*.py"]), ++ ("qmake", &["*.pro", "*.pri"]), ++ ("readme", &["README*", "*README"]), ++ ("r", &["*.R", "*.r", "*.Rmd", "*.Rnw"]), ++ ("rdoc", &["*.rdoc"]), ++ ("rst", &["*.rst"]), ++ ("ruby", &["Gemfile", "*.gemspec", ".irbrc", "Rakefile", "*.rb"]), ++ ("rust", &["*.rs"]), ++ ("sass", &["*.sass", "*.scss"]), ++ ("scala", &["*.scala"]), ++ ("sh", &["*.bash", "*.csh", "*.ksh", "*.sh", "*.tcsh"]), ++ ("spark", &["*.spark"]), ++ ("stylus", &["*.styl"]), ++ ("sql", &["*.sql"]), ++ ("sv", &["*.v", "*.vg", "*.sv", "*.svh", "*.h"]), ++ ("svg", &["*.svg"]), ++ ("swift", &["*.swift"]), ++ ("swig", &["*.def", "*.i"]), ++ ("taskpaper", &["*.taskpaper"]), ++ ("tcl", &["*.tcl"]), ++ ("tex", &["*.tex", "*.ltx", "*.cls", "*.sty", "*.bib"]), ++ ("textile", 
&["*.textile"]), ++ ("ts", &["*.ts", "*.tsx"]), ++ ("txt", &["*.txt"]), ++ ("toml", &["*.toml", "Cargo.lock"]), ++ ("twig", &["*.twig"]), ++ ("vala", &["*.vala"]), ++ ("vb", &["*.vb"]), ++ ("vim", &["*.vim"]), ++ ("vimscript", &["*.vim"]), ++ ("wiki", &["*.mediawiki", "*.wiki"]), ++ ("xml", &["*.xml"]), ++ ("yacc", &["*.y"]), ++ ("yaml", &["*.yaml", "*.yml"]), ++ ("yocto", &["*.bb", "*.bbappend", "*.bbclass"]), ++ ("zsh", &["zshenv", ".zshenv", "zprofile", ".zprofile", "zshrc", ".zshrc", "zlogin", ".zlogin", "zlogout", ".zlogout", "*.zsh"]), ++]; ++ ++/// Glob represents a single glob in a set of file type definitions. ++/// ++/// There may be more than one glob for a particular file type. ++/// ++/// This is used to report information about the highest precedent glob ++/// that matched. ++/// ++/// Note that not all matches necessarily correspond to a specific glob. ++/// For example, if there are one or more selections and a file path doesn't ++/// match any of those selections, then the file path is considered to be ++/// ignored. ++/// ++/// The lifetime `'a` refers to the lifetime of the underlying file type ++/// definition, which corresponds to the lifetime of the file type matcher. ++#[derive(Clone, Debug)] ++pub struct Glob<'a>(GlobInner<'a>); ++ ++#[derive(Clone, Debug)] ++enum GlobInner<'a> { ++ /// No glob matched, but the file path should still be ignored. ++ UnmatchedIgnore, ++ /// A glob matched. ++ Matched { ++ /// The file type definition which provided the glob. ++ def: &'a FileTypeDef, ++ /// The index of the glob that matched inside the file type definition. ++ which: usize, ++ /// Whether the selection was negated or not. ++ negated: bool, ++ } ++} ++ ++impl<'a> Glob<'a> { ++ fn unmatched() -> Glob<'a> { ++ Glob(GlobInner::UnmatchedIgnore) ++ } ++} ++ ++/// A single file type definition. ++/// ++/// File type definitions can be retrieved in aggregate from a file type ++/// matcher. 
File type definitions are also reported when its responsible ++/// for a match. ++#[derive(Clone, Debug, Eq, PartialEq)] ++pub struct FileTypeDef { ++ name: String, ++ globs: Vec, ++} ++ ++impl FileTypeDef { ++ /// Return the name of this file type. ++ pub fn name(&self) -> &str { ++ &self.name ++ } ++ ++ /// Return the globs used to recognize this file type. ++ pub fn globs(&self) -> &[String] { ++ &self.globs ++ } ++} ++ ++/// Types is a file type matcher. ++#[derive(Clone, Debug)] ++pub struct Types { ++ /// All of the file type definitions, sorted lexicographically by name. ++ defs: Vec, ++ /// All of the selections made by the user. ++ selections: Vec>, ++ /// Whether there is at least one Selection::Select in our selections. ++ /// When this is true, a Match::None is converted to Match::Ignore. ++ has_selected: bool, ++ /// A mapping from glob index in the set to two indices. The first is an ++ /// index into `selections` and the second is an index into the ++ /// corresponding file type definition's list of globs. ++ glob_to_selection: Vec<(usize, usize)>, ++ /// The set of all glob selections, used for actual matching. ++ set: GlobSet, ++ /// Temporary storage for globs that match. ++ matches: Arc>>>, ++} ++ ++/// Indicates the type of a selection for a particular file type. ++#[derive(Clone, Debug)] ++enum Selection { ++ Select(String, T), ++ Negate(String, T), ++} ++ ++impl Selection { ++ fn is_negated(&self) -> bool { ++ match *self { ++ Selection::Select(..) => false, ++ Selection::Negate(..) 
=> true, ++ } ++ } ++ ++ fn name(&self) -> &str { ++ match *self { ++ Selection::Select(ref name, _) => name, ++ Selection::Negate(ref name, _) => name, ++ } ++ } ++ ++ fn map U>(self, f: F) -> Selection { ++ match self { ++ Selection::Select(name, inner) => { ++ Selection::Select(name, f(inner)) ++ } ++ Selection::Negate(name, inner) => { ++ Selection::Negate(name, f(inner)) ++ } ++ } ++ } ++ ++ fn inner(&self) -> &T { ++ match *self { ++ Selection::Select(_, ref inner) => inner, ++ Selection::Negate(_, ref inner) => inner, ++ } ++ } ++} ++ ++impl Types { ++ /// Creates a new file type matcher that never matches any path and ++ /// contains no file type definitions. ++ pub fn empty() -> Types { ++ Types { ++ defs: vec![], ++ selections: vec![], ++ has_selected: false, ++ glob_to_selection: vec![], ++ set: GlobSetBuilder::new().build().unwrap(), ++ matches: Arc::new(ThreadLocal::default()), ++ } ++ } ++ ++ /// Returns true if and only if this matcher has zero selections. ++ pub fn is_empty(&self) -> bool { ++ self.selections.is_empty() ++ } ++ ++ /// Returns the number of selections used in this matcher. ++ pub fn len(&self) -> usize { ++ self.selections.len() ++ } ++ ++ /// Return the set of current file type definitions. ++ /// ++ /// Definitions and globs are sorted. ++ pub fn definitions(&self) -> &[FileTypeDef] { ++ &self.defs ++ } ++ ++ /// Returns a match for the given path against this file type matcher. ++ /// ++ /// The path is considered whitelisted if it matches a selected file type. ++ /// The path is considered ignored if it matches a negated file type. ++ /// If at least one file type is selected and `path` doesn't match, then ++ /// the path is also considered ignored. ++ pub fn matched<'a, P: AsRef>( ++ &'a self, ++ path: P, ++ is_dir: bool, ++ ) -> Match> { ++ // File types don't apply to directories, and we can't do anything ++ // if our glob set is empty. 
++ if is_dir || self.set.is_empty() { ++ return Match::None; ++ } ++ // We only want to match against the file name, so extract it. ++ // If one doesn't exist, then we can't match it. ++ let name = match file_name(path.as_ref()) { ++ Some(name) => name, ++ None if self.has_selected => { ++ return Match::Ignore(Glob::unmatched()); ++ } ++ None => { ++ return Match::None; ++ } ++ }; ++ let mut matches = self.matches.get_default().borrow_mut(); ++ self.set.matches_into(name, &mut *matches); ++ // The highest precedent match is the last one. ++ if let Some(&i) = matches.last() { ++ let (isel, iglob) = self.glob_to_selection[i]; ++ let sel = &self.selections[isel]; ++ let glob = Glob(GlobInner::Matched { ++ def: sel.inner(), ++ which: iglob, ++ negated: sel.is_negated(), ++ }); ++ return if sel.is_negated() { ++ Match::Ignore(glob) ++ } else { ++ Match::Whitelist(glob) ++ }; ++ } ++ if self.has_selected { ++ Match::Ignore(Glob::unmatched()) ++ } else { ++ Match::None ++ } ++ } ++} ++ ++/// TypesBuilder builds a type matcher from a set of file type definitions and ++/// a set of file type selections. ++pub struct TypesBuilder { ++ types: HashMap, ++ selections: Vec>, ++} ++ ++impl TypesBuilder { ++ /// Create a new builder for a file type matcher. ++ /// ++ /// The builder contains *no* type definitions to start with. A set ++ /// of default type definitions can be added with `add_defaults`, and ++ /// additional type definitions can be added with `select` and `negate`. ++ pub fn new() -> TypesBuilder { ++ TypesBuilder { ++ types: HashMap::new(), ++ selections: vec![], ++ } ++ } ++ ++ /// Build the current set of file type definitions *and* selections into ++ /// a file type matcher. 
++ pub fn build(&self) -> Result { ++ let defs = self.definitions(); ++ let has_selected = self.selections.iter().any(|s| !s.is_negated()); ++ ++ let mut selections = vec![]; ++ let mut glob_to_selection = vec![]; ++ let mut build_set = GlobSetBuilder::new(); ++ for (isel, selection) in self.selections.iter().enumerate() { ++ let def = match self.types.get(selection.name()) { ++ Some(def) => def.clone(), ++ None => { ++ let name = selection.name().to_string(); ++ return Err(Error::UnrecognizedFileType(name)); ++ } ++ }; ++ for (iglob, glob) in def.globs.iter().enumerate() { ++ build_set.add(try!( ++ GlobBuilder::new(glob) ++ .literal_separator(true) ++ .build() ++ .map_err(|err| { ++ Error::Glob { ++ glob: Some(glob.to_string()), ++ err: err.kind().to_string(), ++ } ++ }))); ++ glob_to_selection.push((isel, iglob)); ++ } ++ selections.push(selection.clone().map(move |_| def)); ++ } ++ let set = try!(build_set.build().map_err(|err| { ++ Error::Glob { glob: None, err: err.to_string() } ++ })); ++ Ok(Types { ++ defs: defs, ++ selections: selections, ++ has_selected: has_selected, ++ glob_to_selection: glob_to_selection, ++ set: set, ++ matches: Arc::new(ThreadLocal::default()), ++ }) ++ } ++ ++ /// Return the set of current file type definitions. ++ /// ++ /// Definitions and globs are sorted. ++ pub fn definitions(&self) -> Vec { ++ let mut defs = vec![]; ++ for def in self.types.values() { ++ let mut def = def.clone(); ++ def.globs.sort(); ++ defs.push(def); ++ } ++ defs.sort_by(|def1, def2| def1.name().cmp(def2.name())); ++ defs ++ } ++ ++ /// Select the file type given by `name`. ++ /// ++ /// If `name` is `all`, then all file types currently defined are selected. 
++ pub fn select(&mut self, name: &str) -> &mut TypesBuilder { ++ if name == "all" { ++ for name in self.types.keys() { ++ self.selections.push(Selection::Select(name.to_string(), ())); ++ } ++ } else { ++ self.selections.push(Selection::Select(name.to_string(), ())); ++ } ++ self ++ } ++ ++ /// Ignore the file type given by `name`. ++ /// ++ /// If `name` is `all`, then all file types currently defined are negated. ++ pub fn negate(&mut self, name: &str) -> &mut TypesBuilder { ++ if name == "all" { ++ for name in self.types.keys() { ++ self.selections.push(Selection::Negate(name.to_string(), ())); ++ } ++ } else { ++ self.selections.push(Selection::Negate(name.to_string(), ())); ++ } ++ self ++ } ++ ++ /// Clear any file type definitions for the type name given. ++ pub fn clear(&mut self, name: &str) -> &mut TypesBuilder { ++ self.types.remove(name); ++ self ++ } ++ ++ /// Add a new file type definition. `name` can be arbitrary and `pat` ++ /// should be a glob recognizing file paths belonging to the `name` type. ++ /// ++ /// If `name` is `all` or otherwise contains any character that is not a ++ /// Unicode letter or number, then an error is returned. ++ pub fn add(&mut self, name: &str, glob: &str) -> Result<(), Error> { ++ lazy_static! { ++ static ref RE: Regex = Regex::new(r"^[\pL\pN]+$").unwrap(); ++ }; ++ if name == "all" || !RE.is_match(name) { ++ return Err(Error::InvalidDefinition); ++ } ++ let (key, glob) = (name.to_string(), glob.to_string()); ++ self.types.entry(key).or_insert_with(|| { ++ FileTypeDef { name: name.to_string(), globs: vec![] } ++ }).globs.push(glob); ++ Ok(()) ++ } ++ ++ /// Add a new file type definition specified in string form. There are two ++ /// valid formats: ++ /// 1. `{name}:{glob}`. This defines a 'root' definition that associates the ++ /// given name with the given glob. ++ /// 2. `{name}:include:{comma-separated list of already defined names}. 
++ /// This defines an 'include' definition that associates the given name ++ /// with the definitions of the given existing types. ++ /// Names may not include any characters that are not ++ /// Unicode letters or numbers. ++ pub fn add_def(&mut self, def: &str) -> Result<(), Error> { ++ let parts: Vec<&str> = def.split(':').collect(); ++ match parts.len() { ++ 2 => { ++ let name = parts[0]; ++ let glob = parts[1]; ++ if name.is_empty() || glob.is_empty() { ++ return Err(Error::InvalidDefinition); ++ } ++ self.add(name, glob) ++ } ++ 3 => { ++ let name = parts[0]; ++ let types_string = parts[2]; ++ if name.is_empty() || parts[1] != "include" || types_string.is_empty() { ++ return Err(Error::InvalidDefinition); ++ } ++ let types = types_string.split(','); ++ // Check ahead of time to ensure that all types specified are ++ // present and fail fast if not. ++ if types.clone().any(|t| !self.types.contains_key(t)) { ++ return Err(Error::InvalidDefinition); ++ } ++ for type_name in types { ++ let globs = self.types.get(type_name).unwrap().globs.clone(); ++ for glob in globs { ++ try!(self.add(name, &glob)); ++ } ++ } ++ Ok(()) ++ } ++ _ => Err(Error::InvalidDefinition) ++ } ++ } ++ ++ /// Add a set of default file type definitions. ++ pub fn add_defaults(&mut self) -> &mut TypesBuilder { ++ static MSG: &'static str = "adding a default type should never fail"; ++ for &(name, exts) in DEFAULT_TYPES { ++ for ext in exts { ++ self.add(name, ext).expect(MSG); ++ } ++ } ++ self ++ } ++} ++ ++#[cfg(test)] ++mod tests { ++ use super::TypesBuilder; ++ ++ macro_rules! 
matched { ++ ($name:ident, $types:expr, $sel:expr, $selnot:expr, ++ $path:expr) => { ++ matched!($name, $types, $sel, $selnot, $path, true); ++ }; ++ (not, $name:ident, $types:expr, $sel:expr, $selnot:expr, ++ $path:expr) => { ++ matched!($name, $types, $sel, $selnot, $path, false); ++ }; ++ ($name:ident, $types:expr, $sel:expr, $selnot:expr, ++ $path:expr, $matched:expr) => { ++ #[test] ++ fn $name() { ++ let mut btypes = TypesBuilder::new(); ++ for tydef in $types { ++ btypes.add_def(tydef).unwrap(); ++ } ++ for sel in $sel { ++ btypes.select(sel); ++ } ++ for selnot in $selnot { ++ btypes.negate(selnot); ++ } ++ let types = btypes.build().unwrap(); ++ let mat = types.matched($path, false); ++ assert_eq!($matched, !mat.is_ignore()); ++ } ++ }; ++ } ++ ++ fn types() -> Vec<&'static str> { ++ vec![ ++ "html:*.html", ++ "html:*.htm", ++ "rust:*.rs", ++ "js:*.js", ++ "foo:*.{rs,foo}", ++ "combo:include:html,rust" ++ ] ++ } ++ ++ matched!(match1, types(), vec!["rust"], vec![], "lib.rs"); ++ matched!(match2, types(), vec!["html"], vec![], "index.html"); ++ matched!(match3, types(), vec!["html"], vec![], "index.htm"); ++ matched!(match4, types(), vec!["html", "rust"], vec![], "main.rs"); ++ matched!(match5, types(), vec![], vec![], "index.html"); ++ matched!(match6, types(), vec![], vec!["rust"], "index.html"); ++ matched!(match7, types(), vec!["foo"], vec!["rust"], "main.foo"); ++ matched!(match8, types(), vec!["combo"], vec![], "index.html"); ++ matched!(match9, types(), vec!["combo"], vec![], "lib.rs"); ++ ++ matched!(not, matchnot1, types(), vec!["rust"], vec![], "index.html"); ++ matched!(not, matchnot2, types(), vec![], vec!["rust"], "main.rs"); ++ matched!(not, matchnot3, types(), vec!["foo"], vec!["rust"], "main.rs"); ++ matched!(not, matchnot4, types(), vec!["rust"], vec!["foo"], "main.rs"); ++ matched!(not, matchnot5, types(), vec!["rust"], vec!["foo"], "main.foo"); ++ matched!(not, matchnot6, types(), vec!["combo"], vec![], "leftpad.js"); ++ ++ #[test] ++ fn 
test_invalid_defs() { ++ let mut btypes = TypesBuilder::new(); ++ for tydef in types() { ++ btypes.add_def(tydef).unwrap(); ++ } ++ // Preserve the original definitions for later comparison. ++ let original_defs = btypes.definitions(); ++ let bad_defs = vec![ ++ // Reference to type that does not exist ++ "combo:include:html,python", ++ // Bad format ++ "combo:foobar:html,rust", ++ "" ++ ]; ++ for def in bad_defs { ++ assert!(btypes.add_def(def).is_err()); ++ // Ensure that nothing changed, even if some of the includes were valid. ++ assert_eq!(btypes.definitions(), original_defs); ++ } ++ } ++} diff --cc vendor/ignore-0.2.2/src/walk.rs index 000000000,000000000..fdb128323 new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/src/walk.rs @@@ -1,0 -1,0 +1,1565 @@@ ++use std::cmp; ++use std::ffi::{OsStr, OsString}; ++use std::fmt; ++use std::fs::{self, FileType, Metadata}; ++use std::io; ++use std::path::{Path, PathBuf}; ++use std::sync::Arc; ++use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; ++use std::thread; ++use std::time::Duration; ++use std::vec; ++ ++use crossbeam::sync::MsQueue; ++use walkdir::{self, WalkDir, WalkDirIterator, is_same_file}; ++ ++use dir::{Ignore, IgnoreBuilder}; ++use gitignore::GitignoreBuilder; ++use overrides::Override; ++use types::Types; ++use {Error, PartialErrorBuilder}; ++ ++/// A directory entry with a possible error attached. ++/// ++/// The error typically refers to a problem parsing ignore files in a ++/// particular directory. ++#[derive(Debug)] ++pub struct DirEntry { ++ dent: DirEntryInner, ++ err: Option, ++} ++ ++impl DirEntry { ++ /// The full path that this entry represents. ++ pub fn path(&self) -> &Path { ++ self.dent.path() ++ } ++ ++ /// Whether this entry corresponds to a symbolic link or not. ++ pub fn path_is_symbolic_link(&self) -> bool { ++ self.dent.path_is_symbolic_link() ++ } ++ ++ /// Returns true if and only if this entry corresponds to stdin. 
++ /// ++ /// i.e., The entry has depth 0 and its file name is `-`. ++ pub fn is_stdin(&self) -> bool { ++ self.dent.is_stdin() ++ } ++ ++ /// Return the metadata for the file that this entry points to. ++ pub fn metadata(&self) -> Result { ++ self.dent.metadata() ++ } ++ ++ /// Return the file type for the file that this entry points to. ++ /// ++ /// This entry doesn't have a file type if it corresponds to stdin. ++ pub fn file_type(&self) -> Option { ++ self.dent.file_type() ++ } ++ ++ /// Return the file name of this entry. ++ /// ++ /// If this entry has no file name (e.g., `/`), then the full path is ++ /// returned. ++ pub fn file_name(&self) -> &OsStr { ++ self.dent.file_name() ++ } ++ ++ /// Returns the depth at which this entry was created relative to the root. ++ pub fn depth(&self) -> usize { ++ self.dent.depth() ++ } ++ ++ /// Returns the underlying inode number if one exists. ++ /// ++ /// If this entry doesn't have an inode number, then `None` is returned. ++ #[cfg(unix)] ++ pub fn ino(&self) -> Option { ++ self.dent.ino() ++ } ++ ++ /// Returns an error, if one exists, associated with processing this entry. ++ /// ++ /// An example of an error is one that occurred while parsing an ignore ++ /// file. ++ pub fn error(&self) -> Option<&Error> { ++ self.err.as_ref() ++ } ++ ++ fn new_stdin() -> DirEntry { ++ DirEntry { ++ dent: DirEntryInner::Stdin, ++ err: None, ++ } ++ } ++ ++ fn new_walkdir(dent: walkdir::DirEntry, err: Option) -> DirEntry { ++ DirEntry { ++ dent: DirEntryInner::Walkdir(dent), ++ err: err, ++ } ++ } ++ ++ fn new_raw(dent: DirEntryRaw, err: Option) -> DirEntry { ++ DirEntry { ++ dent: DirEntryInner::Raw(dent), ++ err: err, ++ } ++ } ++} ++ ++/// DirEntryInner is the implementation of DirEntry. ++/// ++/// It specifically represents three distinct sources of directory entries: ++/// ++/// 1. From the walkdir crate. ++/// 2. Special entries that represent things like stdin. ++/// 3. From a path. 
++/// ++/// Specifically, (3) has to essentially re-create the DirEntry implementation ++/// from WalkDir. ++#[derive(Debug)] ++enum DirEntryInner { ++ Stdin, ++ Walkdir(walkdir::DirEntry), ++ Raw(DirEntryRaw), ++} ++ ++impl DirEntryInner { ++ fn path(&self) -> &Path { ++ use self::DirEntryInner::*; ++ match *self { ++ Stdin => Path::new(""), ++ Walkdir(ref x) => x.path(), ++ Raw(ref x) => x.path(), ++ } ++ } ++ ++ fn path_is_symbolic_link(&self) -> bool { ++ use self::DirEntryInner::*; ++ match *self { ++ Stdin => false, ++ Walkdir(ref x) => x.path_is_symbolic_link(), ++ Raw(ref x) => x.path_is_symbolic_link(), ++ } ++ } ++ ++ fn is_stdin(&self) -> bool { ++ match *self { ++ DirEntryInner::Stdin => true, ++ _ => false, ++ } ++ } ++ ++ fn metadata(&self) -> Result { ++ use self::DirEntryInner::*; ++ match *self { ++ Stdin => { ++ let err = Error::Io(io::Error::new( ++ io::ErrorKind::Other, " has no metadata")); ++ Err(err.with_path("")) ++ } ++ Walkdir(ref x) => { ++ x.metadata().map_err(|err| { ++ Error::Io(io::Error::from(err)).with_path(x.path()) ++ }) ++ } ++ Raw(ref x) => x.metadata(), ++ } ++ } ++ ++ fn file_type(&self) -> Option { ++ use self::DirEntryInner::*; ++ match *self { ++ Stdin => None, ++ Walkdir(ref x) => Some(x.file_type()), ++ Raw(ref x) => Some(x.file_type()), ++ } ++ } ++ ++ fn file_name(&self) -> &OsStr { ++ use self::DirEntryInner::*; ++ match *self { ++ Stdin => OsStr::new(""), ++ Walkdir(ref x) => x.file_name(), ++ Raw(ref x) => x.file_name(), ++ } ++ } ++ ++ fn depth(&self) -> usize { ++ use self::DirEntryInner::*; ++ match *self { ++ Stdin => 0, ++ Walkdir(ref x) => x.depth(), ++ Raw(ref x) => x.depth(), ++ } ++ } ++ ++ #[cfg(unix)] ++ fn ino(&self) -> Option { ++ use self::DirEntryInner::*; ++ match *self { ++ Stdin => None, ++ Walkdir(ref x) => Some(x.ino()), ++ Raw(ref x) => Some(x.ino()), ++ } ++ } ++} ++ ++/// DirEntryRaw is essentially copied from the walkdir crate so that we can ++/// build `DirEntry`s from whole cloth in the 
parallel iterator. ++struct DirEntryRaw { ++ /// The path as reported by the `fs::ReadDir` iterator (even if it's a ++ /// symbolic link). ++ path: PathBuf, ++ /// The file type. Necessary for recursive iteration, so store it. ++ ty: FileType, ++ /// Is set when this entry was created from a symbolic link and the user ++ /// expects the iterator to follow symbolic links. ++ follow_link: bool, ++ /// The depth at which this entry was generated relative to the root. ++ depth: usize, ++ /// The underlying inode number (Unix only). ++ #[cfg(unix)] ++ ino: u64, ++} ++ ++impl fmt::Debug for DirEntryRaw { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ // Leaving out FileType because it doesn't have a debug impl ++ // in Rust 1.9. We could add it if we really wanted to by manually ++ // querying each possibly file type. Meh. ---AG ++ f.debug_struct("DirEntryRaw") ++ .field("path", &self.path) ++ .field("follow_link", &self.follow_link) ++ .field("depth", &self.depth) ++ .finish() ++ } ++} ++ ++impl DirEntryRaw { ++ fn path(&self) -> &Path { ++ &self.path ++ } ++ ++ fn path_is_symbolic_link(&self) -> bool { ++ self.ty.is_symlink() || self.follow_link ++ } ++ ++ fn metadata(&self) -> Result { ++ if self.follow_link { ++ fs::metadata(&self.path) ++ } else { ++ fs::symlink_metadata(&self.path) ++ }.map_err(|err| Error::Io(io::Error::from(err)).with_path(&self.path)) ++ } ++ ++ fn file_type(&self) -> FileType { ++ self.ty ++ } ++ ++ fn file_name(&self) -> &OsStr { ++ self.path.file_name().unwrap_or_else(|| self.path.as_os_str()) ++ } ++ ++ fn depth(&self) -> usize { ++ self.depth ++ } ++ ++ #[cfg(unix)] ++ fn ino(&self) -> u64 { ++ self.ino ++ } ++ ++ fn from_entry( ++ depth: usize, ++ ent: &fs::DirEntry, ++ ) -> Result { ++ let ty = try!(ent.file_type().map_err(|err| { ++ let err = Error::Io(io::Error::from(err)).with_path(ent.path()); ++ Error::WithDepth { ++ depth: depth, ++ err: Box::new(err), ++ } ++ })); ++ Ok(DirEntryRaw::from_entry_os(depth, ent, ty)) ++ } 
++ ++ #[cfg(not(unix))] ++ fn from_entry_os( ++ depth: usize, ++ ent: &fs::DirEntry, ++ ty: fs::FileType, ++ ) -> DirEntryRaw { ++ DirEntryRaw { ++ path: ent.path(), ++ ty: ty, ++ follow_link: false, ++ depth: depth, ++ } ++ } ++ ++ #[cfg(unix)] ++ fn from_entry_os( ++ depth: usize, ++ ent: &fs::DirEntry, ++ ty: fs::FileType, ++ ) -> DirEntryRaw { ++ use std::os::unix::fs::DirEntryExt; ++ ++ DirEntryRaw { ++ path: ent.path(), ++ ty: ty, ++ follow_link: false, ++ depth: depth, ++ ino: ent.ino(), ++ } ++ } ++ ++ #[cfg(not(unix))] ++ fn from_link(depth: usize, pb: PathBuf) -> Result { ++ let md = try!(fs::metadata(&pb).map_err(|err| { ++ Error::Io(err).with_path(&pb) ++ })); ++ Ok(DirEntryRaw { ++ path: pb, ++ ty: md.file_type(), ++ follow_link: true, ++ depth: depth, ++ }) ++ } ++ ++ #[cfg(unix)] ++ fn from_link(depth: usize, pb: PathBuf) -> Result { ++ use std::os::unix::fs::MetadataExt; ++ ++ let md = try!(fs::metadata(&pb).map_err(|err| { ++ Error::Io(err).with_path(&pb) ++ })); ++ Ok(DirEntryRaw { ++ path: pb, ++ ty: md.file_type(), ++ follow_link: true, ++ depth: depth, ++ ino: md.ino(), ++ }) ++ } ++} ++ ++/// WalkBuilder builds a recursive directory iterator. ++/// ++/// The builder supports a large number of configurable options. This includes ++/// specific glob overrides, file type matching, toggling whether hidden ++/// files are ignored or not, and of course, support for respecting gitignore ++/// files. ++/// ++/// By default, all ignore files found are respected. This includes `.ignore`, ++/// `.gitignore`, `.git/info/exclude` and even your global gitignore ++/// globs, usually found in `$XDG_CONFIG_HOME/git/ignore`. ++/// ++/// Some standard recursive directory options are also supported, such as ++/// limiting the recursive depth or whether to follow symbolic links (disabled ++/// by default). ++/// ++/// # Ignore rules ++/// ++/// There are many rules that influence whether a particular file or directory ++/// is skipped by this iterator. 
Those rules are documented here. Note that ++/// the rules assume a default configuration. ++/// ++/// * First, glob overrides are checked. If a path matches a glob override, ++/// then matching stops. The path is then only skipped if the glob that matched ++/// the path is an ignore glob. (An override glob is a whitelist glob unless it ++/// starts with a `!`, in which case it is an ignore glob.) ++/// * Second, ignore files are checked. Ignore files currently only come from ++/// git ignore files (`.gitignore`, `.git/info/exclude` and the configured ++/// global gitignore file), plain `.ignore` files, which have the same format ++/// as gitignore files, or explicitly added ignore files. The precedence order ++/// is: `.ignore`, `.gitignore`, `.git/info/exclude`, global gitignore and ++/// finally explicitly added ignore files. Note that precedence between ++/// different types of ignore files is not impacted by the directory hierarchy; ++/// any `.ignore` file overrides all `.gitignore` files. Within each precedence ++/// level, more nested ignore files have a higher precedence than less nested ++/// ignore files. ++/// * Third, if the previous step yields an ignore match, then all matching ++/// is stopped and the path is skipped. If it yields a whitelist match, then ++/// matching continues. A whitelist match can be overridden by a later matcher. ++/// * Fourth, unless the path is a directory, the file type matcher is run on ++/// the path. As above, if it yields an ignore match, then all matching is ++/// stopped and the path is skipped. If it yields a whitelist match, then ++/// matching continues. ++/// * Fifth, if the path hasn't been whitelisted and it is hidden, then the ++/// path is skipped. ++/// * Sixth, unless the path is a directory, the size of the file is compared ++/// against the max filesize limit. If it exceeds the limit, it is skipped. ++/// * Seventh, if the path has made it this far then it is yielded in the ++/// iterator. 
++#[derive(Clone)] ++pub struct WalkBuilder { ++ paths: Vec, ++ ig_builder: IgnoreBuilder, ++ parents: bool, ++ max_depth: Option, ++ max_filesize: Option, ++ follow_links: bool, ++ sorter: Option cmp::Ordering + 'static>>, ++ threads: usize, ++} ++ ++impl fmt::Debug for WalkBuilder { ++ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { ++ f.debug_struct("WalkBuilder") ++ .field("paths", &self.paths) ++ .field("ig_builder", &self.ig_builder) ++ .field("parents", &self.parents) ++ .field("max_depth", &self.max_depth) ++ .field("max_filesize", &self.max_filesize) ++ .field("follow_links", &self.follow_links) ++ .field("threads", &self.threads) ++ .finish() ++ } ++} ++ ++impl WalkBuilder { ++ /// Create a new builder for a recursive directory iterator for the ++ /// directory given. ++ /// ++ /// Note that if you want to traverse multiple different directories, it ++ /// is better to call `add` on this builder than to create multiple ++ /// `Walk` values. ++ pub fn new>(path: P) -> WalkBuilder { ++ WalkBuilder { ++ paths: vec![path.as_ref().to_path_buf()], ++ ig_builder: IgnoreBuilder::new(), ++ parents: true, ++ max_depth: None, ++ max_filesize: None, ++ follow_links: false, ++ sorter: None, ++ threads: 0, ++ } ++ } ++ ++ /// Build a new `Walk` iterator. 
++ pub fn build(&self) -> Walk { ++ let follow_links = self.follow_links; ++ let max_depth = self.max_depth; ++ let cmp = self.sorter.clone(); ++ let its = self.paths.iter().map(move |p| { ++ if p == Path::new("-") { ++ (p.to_path_buf(), None) ++ } else { ++ let mut wd = WalkDir::new(p); ++ wd = wd.follow_links(follow_links || p.is_file()); ++ if let Some(max_depth) = max_depth { ++ wd = wd.max_depth(max_depth); ++ } ++ if let Some(ref cmp) = cmp { ++ let cmp = cmp.clone(); ++ wd = wd.sort_by(move |a, b| cmp(a, b)); ++ } ++ (p.to_path_buf(), Some(WalkEventIter::from(wd))) ++ } ++ }).collect::>().into_iter(); ++ let ig_root = self.ig_builder.build(); ++ Walk { ++ its: its, ++ it: None, ++ ig_root: ig_root.clone(), ++ ig: ig_root.clone(), ++ max_filesize: self.max_filesize, ++ parents: self.parents, ++ } ++ } ++ ++ /// Build a new `WalkParallel` iterator. ++ /// ++ /// Note that this *doesn't* return something that implements `Iterator`. ++ /// Instead, the returned value must be run with a closure. e.g., ++ /// `builder.build_parallel().run(|| |path| println!("{:?}", path))`. ++ pub fn build_parallel(&self) -> WalkParallel { ++ WalkParallel { ++ paths: self.paths.clone().into_iter(), ++ ig_root: self.ig_builder.build(), ++ max_depth: self.max_depth, ++ max_filesize: self.max_filesize, ++ follow_links: self.follow_links, ++ parents: self.parents, ++ threads: self.threads, ++ } ++ } ++ ++ /// Add a file path to the iterator. ++ /// ++ /// Each additional file path added is traversed recursively. This should ++ /// be preferred over building multiple `Walk` iterators since this ++ /// enables reusing resources across iteration. ++ pub fn add>(&mut self, path: P) -> &mut WalkBuilder { ++ self.paths.push(path.as_ref().to_path_buf()); ++ self ++ } ++ ++ /// The maximum depth to recurse. ++ /// ++ /// The default, `None`, imposes no depth restriction. 
++ pub fn max_depth(&mut self, depth: Option) -> &mut WalkBuilder { ++ self.max_depth = depth; ++ self ++ } ++ ++ /// Whether to follow symbolic links or not. ++ pub fn follow_links(&mut self, yes: bool) -> &mut WalkBuilder { ++ self.follow_links = yes; ++ self ++ } ++ ++ /// Whether to ignore files above the specified limit. ++ pub fn max_filesize(&mut self, filesize: Option) -> &mut WalkBuilder { ++ self.max_filesize = filesize; ++ self ++ } ++ ++ /// The number of threads to use for traversal. ++ /// ++ /// Note that this only has an effect when using `build_parallel`. ++ /// ++ /// The default setting is `0`, which chooses the number of threads ++ /// automatically using heuristics. ++ pub fn threads(&mut self, n: usize) -> &mut WalkBuilder { ++ self.threads = n; ++ self ++ } ++ ++ /// Add an ignore file to the matcher. ++ /// ++ /// This has lower precedence than all other sources of ignore rules. ++ /// ++ /// If there was a problem adding the ignore file, then an error is ++ /// returned. Note that the error may indicate *partial* failure. For ++ /// example, if an ignore file contains an invalid glob, all other globs ++ /// are still applied. ++ pub fn add_ignore>(&mut self, path: P) -> Option { ++ let mut builder = GitignoreBuilder::new(""); ++ let mut errs = PartialErrorBuilder::default(); ++ errs.maybe_push(builder.add(path)); ++ match builder.build() { ++ Ok(gi) => { self.ig_builder.add_ignore(gi); } ++ Err(err) => { errs.push(err); } ++ } ++ errs.into_error_option() ++ } ++ ++ /// Add an override matcher. ++ /// ++ /// By default, no override matcher is used. ++ /// ++ /// This overrides any previous setting. ++ pub fn overrides(&mut self, overrides: Override) -> &mut WalkBuilder { ++ self.ig_builder.overrides(overrides); ++ self ++ } ++ ++ /// Add a file type matcher. ++ /// ++ /// By default, no file type matcher is used. ++ /// ++ /// This overrides any previous setting. 
++ pub fn types(&mut self, types: Types) -> &mut WalkBuilder { ++ self.ig_builder.types(types); ++ self ++ } ++ ++ /// Enables ignoring hidden files. ++ /// ++ /// This is enabled by default. ++ pub fn hidden(&mut self, yes: bool) -> &mut WalkBuilder { ++ self.ig_builder.hidden(yes); ++ self ++ } ++ ++ /// Enables reading ignore files from parent directories. ++ /// ++ /// If this is enabled, then the parent directories of each file path given ++ /// are traversed for ignore files (subject to the ignore settings on ++ /// this builder). Note that file paths are canonicalized with respect to ++ /// the current working directory in order to determine parent directories. ++ /// ++ /// This is enabled by default. ++ pub fn parents(&mut self, yes: bool) -> &mut WalkBuilder { ++ self.parents = yes; ++ self ++ } ++ ++ /// Enables reading `.ignore` files. ++ /// ++ /// `.ignore` files have the same semantics as `gitignore` files and are ++ /// supported by search tools such as ripgrep and The Silver Searcher. ++ /// ++ /// This is enabled by default. ++ pub fn ignore(&mut self, yes: bool) -> &mut WalkBuilder { ++ self.ig_builder.ignore(yes); ++ self ++ } ++ ++ /// Enables reading a global gitignore file, whose path is specified in ++ /// git's `core.excludesFile` config option. ++ /// ++ /// Git's config file location is `$HOME/.gitconfig`. If `$HOME/.gitconfig` ++ /// does not exist or does not specify `core.excludesFile`, then ++ /// `$XDG_CONFIG_HOME/git/ignore` is read. If `$XDG_CONFIG_HOME` is not ++ /// set or is empty, then `$HOME/.config/git/ignore` is used instead. ++ pub fn git_global(&mut self, yes: bool) -> &mut WalkBuilder { ++ self.ig_builder.git_global(yes); ++ self ++ } ++ ++ /// Enables reading `.gitignore` files. ++ /// ++ /// `.gitignore` files have match semantics as described in the `gitignore` ++ /// man page. ++ /// ++ /// This is enabled by default. 
++ pub fn git_ignore(&mut self, yes: bool) -> &mut WalkBuilder { ++ self.ig_builder.git_ignore(yes); ++ self ++ } ++ ++ /// Enables reading `.git/info/exclude` files. ++ /// ++ /// `.git/info/exclude` files have match semantics as described in the ++ /// `gitignore` man page. ++ /// ++ /// This is enabled by default. ++ pub fn git_exclude(&mut self, yes: bool) -> &mut WalkBuilder { ++ self.ig_builder.git_exclude(yes); ++ self ++ } ++ ++ /// Set a function for sorting directory entries. ++ /// ++ /// If a compare function is set, the resulting iterator will return all ++ /// paths in sorted order. The compare function will be called to compare ++ /// names from entries from the same directory using only the name of the ++ /// entry. ++ /// ++ /// Note that this is not used in the parallel iterator. ++ pub fn sort_by(&mut self, cmp: F) -> &mut WalkBuilder ++ where F: Fn(&OsString, &OsString) -> cmp::Ordering + 'static { ++ self.sorter = Some(Arc::new(cmp)); ++ self ++ } ++} ++ ++/// Walk is a recursive directory iterator over file paths in one or more ++/// directories. ++/// ++/// Only file and directory paths matching the rules are returned. By default, ++/// ignore files like `.gitignore` are respected. The precise matching rules ++/// and precedence is explained in the documentation for `WalkBuilder`. ++pub struct Walk { ++ its: vec::IntoIter<(PathBuf, Option)>, ++ it: Option, ++ ig_root: Ignore, ++ ig: Ignore, ++ max_filesize: Option, ++ parents: bool, ++} ++ ++impl Walk { ++ /// Creates a new recursive directory iterator for the file path given. ++ /// ++ /// Note that this uses default settings, which include respecting ++ /// `.gitignore` files. To configure the iterator, use `WalkBuilder` ++ /// instead. 
++ pub fn new>(path: P) -> Walk { ++ WalkBuilder::new(path).build() ++ } ++ ++ fn skip_entry(&self, ent: &walkdir::DirEntry) -> bool { ++ if ent.depth() == 0 { ++ return false; ++ } ++ ++ let is_dir = ent.file_type().is_dir(); ++ let max_size = self.max_filesize; ++ let should_skip_path = skip_path(&self.ig, ent.path(), is_dir); ++ let should_skip_filesize = if !is_dir && max_size.is_some() { ++ skip_filesize(max_size.unwrap(), ent.path(), &ent.metadata().ok()) ++ } else { ++ false ++ }; ++ ++ should_skip_path || should_skip_filesize ++ } ++} ++ ++impl Iterator for Walk { ++ type Item = Result; ++ ++ #[inline(always)] ++ fn next(&mut self) -> Option> { ++ loop { ++ let ev = match self.it.as_mut().and_then(|it| it.next()) { ++ Some(ev) => ev, ++ None => { ++ match self.its.next() { ++ None => return None, ++ Some((_, None)) => { ++ return Some(Ok(DirEntry::new_stdin())); ++ } ++ Some((path, Some(it))) => { ++ self.it = Some(it); ++ if self.parents && path.is_dir() { ++ let (ig, err) = self.ig_root.add_parents(path); ++ self.ig = ig; ++ if let Some(err) = err { ++ return Some(Err(err)); ++ } ++ } else { ++ self.ig = self.ig_root.clone(); ++ } ++ } ++ } ++ continue; ++ } ++ }; ++ match ev { ++ Err(err) => { ++ return Some(Err(Error::from(err))); ++ } ++ Ok(WalkEvent::Exit) => { ++ self.ig = self.ig.parent().unwrap(); ++ } ++ Ok(WalkEvent::Dir(ent)) => { ++ if self.skip_entry(&ent) { ++ self.it.as_mut().unwrap().it.skip_current_dir(); ++ // Still need to push this on the stack because ++ // we'll get a WalkEvent::Exit event for this dir. ++ // We don't care if it errors though. 
++ let (igtmp, _) = self.ig.add_child(ent.path()); ++ self.ig = igtmp; ++ continue; ++ } ++ let (igtmp, err) = self.ig.add_child(ent.path()); ++ self.ig = igtmp; ++ return Some(Ok(DirEntry::new_walkdir(ent, err))); ++ } ++ Ok(WalkEvent::File(ent)) => { ++ if self.skip_entry(&ent) { ++ continue; ++ } ++ return Some(Ok(DirEntry::new_walkdir(ent, None))); ++ } ++ } ++ } ++ } ++} ++ ++/// WalkEventIter transforms a WalkDir iterator into an iterator that more ++/// accurately describes the directory tree. Namely, it emits events that are ++/// one of three types: directory, file or "exit." An "exit" event means that ++/// the entire contents of a directory have been enumerated. ++struct WalkEventIter { ++ depth: usize, ++ it: walkdir::Iter, ++ next: Option>, ++} ++ ++#[derive(Debug)] ++enum WalkEvent { ++ Dir(walkdir::DirEntry), ++ File(walkdir::DirEntry), ++ Exit, ++} ++ ++impl From for WalkEventIter { ++ fn from(it: WalkDir) -> WalkEventIter { ++ WalkEventIter { depth: 0, it: it.into_iter(), next: None } ++ } ++} ++ ++impl Iterator for WalkEventIter { ++ type Item = walkdir::Result; ++ ++ #[inline(always)] ++ fn next(&mut self) -> Option> { ++ let dent = self.next.take().or_else(|| self.it.next()); ++ let depth = match dent { ++ None => 0, ++ Some(Ok(ref dent)) => dent.depth(), ++ Some(Err(ref err)) => err.depth(), ++ }; ++ if depth < self.depth { ++ self.depth -= 1; ++ self.next = dent; ++ return Some(Ok(WalkEvent::Exit)); ++ } ++ self.depth = depth; ++ match dent { ++ None => None, ++ Some(Err(err)) => Some(Err(err)), ++ Some(Ok(dent)) => { ++ if dent.file_type().is_dir() { ++ self.depth += 1; ++ Some(Ok(WalkEvent::Dir(dent))) ++ } else { ++ Some(Ok(WalkEvent::File(dent))) ++ } ++ } ++ } ++ } ++} ++ ++/// WalkState is used in the parallel recursive directory iterator to indicate ++/// whether walking should continue as normal, skip descending into a ++/// particular directory or quit the walk entirely. 
++#[derive(Clone, Copy, Debug, Eq, PartialEq)] ++pub enum WalkState { ++ /// Continue walking as normal. ++ Continue, ++ /// If the directory entry given is a directory, don't descend into it. ++ /// In all other cases, this has no effect. ++ Skip, ++ /// Quit the entire iterator as soon as possible. ++ /// ++ /// Note that this is an inherently asynchronous action. It is possible ++ /// for more entries to be yielded even after instructing the iterator ++ /// to quit. ++ Quit, ++} ++ ++impl WalkState { ++ fn is_quit(&self) -> bool { ++ *self == WalkState::Quit ++ } ++} ++ ++/// WalkParallel is a parallel recursive directory iterator over files paths ++/// in one or more directories. ++/// ++/// Only file and directory paths matching the rules are returned. By default, ++/// ignore files like `.gitignore` are respected. The precise matching rules ++/// and precedence is explained in the documentation for `WalkBuilder`. ++/// ++/// Unlike `Walk`, this uses multiple threads for traversing a directory. ++pub struct WalkParallel { ++ paths: vec::IntoIter, ++ ig_root: Ignore, ++ parents: bool, ++ max_filesize: Option, ++ max_depth: Option, ++ follow_links: bool, ++ threads: usize, ++} ++ ++impl WalkParallel { ++ /// Execute the parallel recursive directory iterator. `mkf` is called ++ /// for each thread used for iteration. The function produced by `mkf` ++ /// is then in turn called for each visited file path. ++ pub fn run( ++ self, ++ mut mkf: F, ++ ) where F: FnMut() -> Box) -> WalkState + Send + 'static> { ++ let mut f = mkf(); ++ let threads = self.threads(); ++ let queue = Arc::new(MsQueue::new()); ++ let mut any_work = false; ++ // Send the initial set of root paths to the pool of workers. ++ // Note that we only send directories. For files, we send to them the ++ // callback directly. 
++ for path in self.paths { ++ let dent = ++ if path == Path::new("-") { ++ DirEntry::new_stdin() ++ } else { ++ match DirEntryRaw::from_link(0, path) { ++ Ok(dent) => DirEntry::new_raw(dent, None), ++ Err(err) => { ++ if f(Err(err)).is_quit() { ++ return; ++ } ++ continue; ++ } ++ } ++ }; ++ queue.push(Message::Work(Work { ++ dent: dent, ++ ignore: self.ig_root.clone(), ++ })); ++ any_work = true; ++ } ++ // ... but there's no need to start workers if we don't need them. ++ if !any_work { ++ return; ++ } ++ // Create the workers and then wait for them to finish. ++ let num_waiting = Arc::new(AtomicUsize::new(0)); ++ let num_quitting = Arc::new(AtomicUsize::new(0)); ++ let quit_now = Arc::new(AtomicBool::new(false)); ++ let mut handles = vec![]; ++ for _ in 0..threads { ++ let worker = Worker { ++ f: mkf(), ++ queue: queue.clone(), ++ quit_now: quit_now.clone(), ++ is_waiting: false, ++ is_quitting: false, ++ num_waiting: num_waiting.clone(), ++ num_quitting: num_quitting.clone(), ++ threads: threads, ++ parents: self.parents, ++ max_depth: self.max_depth, ++ max_filesize: self.max_filesize, ++ follow_links: self.follow_links, ++ }; ++ handles.push(thread::spawn(|| worker.run())); ++ } ++ for handle in handles { ++ handle.join().unwrap(); ++ } ++ } ++ ++ fn threads(&self) -> usize { ++ if self.threads == 0 { ++ 2 ++ } else { ++ self.threads ++ } ++ } ++} ++ ++/// Message is the set of instructions that a worker knows how to process. ++enum Message { ++ /// A work item corresponds to a directory that should be descended into. ++ /// Work items for entries that should be skipped or ignored should not ++ /// be produced. ++ Work(Work), ++ /// This instruction indicates that the worker should start quitting. ++ Quit, ++} ++ ++/// A unit of work for each worker to process. ++/// ++/// Each unit of work corresponds to a directory that should be descended ++/// into. ++struct Work { ++ /// The directory entry. 
++ dent: DirEntry, ++ /// Any ignore matchers that have been built for this directory's parents. ++ ignore: Ignore, ++} ++ ++impl Work { ++ /// Returns true if and only if this work item is a directory. ++ fn is_dir(&self) -> bool { ++ self.dent.file_type().map_or(false, |t| t.is_dir()) ++ } ++ ++ /// Adds ignore rules for parent directories. ++ /// ++ /// Note that this only applies to entries at depth 0. On all other ++ /// entries, this is a no-op. ++ fn add_parents(&mut self) -> Option { ++ if self.dent.depth() > 0 { ++ return None; ++ } ++ // At depth 0, the path of this entry is a root path, so we can ++ // use it directly to add parent ignore rules. ++ let (ig, err) = self.ignore.add_parents(self.dent.path()); ++ self.ignore = ig; ++ err ++ } ++ ++ /// Reads the directory contents of this work item and adds ignore ++ /// rules for this directory. ++ /// ++ /// If there was a problem with reading the directory contents, then ++ /// an error is returned. If there was a problem reading the ignore ++ /// rules for this directory, then the error is attached to this ++ /// work item's directory entry. ++ fn read_dir(&mut self) -> Result { ++ let readdir = match fs::read_dir(self.dent.path()) { ++ Ok(readdir) => readdir, ++ Err(err) => { ++ let err = Error::from(err) ++ .with_path(self.dent.path()) ++ .with_depth(self.dent.depth()); ++ return Err(err); ++ } ++ }; ++ let (ig, err) = self.ignore.add_child(self.dent.path()); ++ self.ignore = ig; ++ self.dent.err = err; ++ Ok(readdir) ++ } ++} ++ ++/// A worker is responsible for descending into directories, updating the ++/// ignore matchers, producing new work and invoking the caller's callback. ++/// ++/// Note that a worker is *both* a producer and a consumer. ++struct Worker { ++ /// The caller's callback. ++ f: Box) -> WalkState + Send + 'static>, ++ /// A queue of work items. This is multi-producer and multi-consumer. ++ queue: Arc>, ++ /// Whether all workers should quit at the next opportunity. 
Note that ++ /// this is distinct from quitting because of exhausting the contents of ++ /// a directory. Instead, this is used when the caller's callback indicates ++ /// that the iterator should quit immediately. ++ quit_now: Arc, ++ /// Whether this worker is waiting for more work. ++ is_waiting: bool, ++ /// Whether this worker has started to quit. ++ is_quitting: bool, ++ /// The number of workers waiting for more work. ++ num_waiting: Arc, ++ /// The number of workers waiting to quit. ++ num_quitting: Arc, ++ /// The total number of workers. ++ threads: usize, ++ /// Whether to create ignore matchers for parents of caller specified ++ /// directories. ++ parents: bool, ++ /// The maximum depth of directories to descend. A value of `0` means no ++ /// descension at all. ++ max_depth: Option, ++ /// The maximum size a searched file can be (in bytes). If a file exceeds ++ /// this size it will be skipped. ++ max_filesize: Option, ++ /// Whether to follow symbolic links or not. When this is enabled, loop ++ /// detection is performed. ++ follow_links: bool, ++} ++ ++impl Worker { ++ /// Runs this worker until there is no more work left to do. ++ /// ++ /// The worker will call the caller's callback for all entries that aren't ++ /// skipped by the ignore matcher. ++ fn run(mut self) { ++ while let Some(mut work) = self.get_work() { ++ // If the work is not a directory, then we can just execute the ++ // caller's callback immediately and move on. 
++ if !work.is_dir() { ++ if (self.f)(Ok(work.dent)).is_quit() { ++ self.quit_now(); ++ return; ++ } ++ continue; ++ } ++ if self.parents { ++ if let Some(err) = work.add_parents() { ++ if (self.f)(Err(err)).is_quit() { ++ self.quit_now(); ++ return; ++ } ++ } ++ } ++ let readdir = match work.read_dir() { ++ Ok(readdir) => readdir, ++ Err(err) => { ++ if (self.f)(Err(err)).is_quit() { ++ self.quit_now(); ++ return; ++ } ++ continue; ++ } ++ }; ++ let depth = work.dent.depth(); ++ match (self.f)(Ok(work.dent)) { ++ WalkState::Continue => {} ++ WalkState::Skip => continue, ++ WalkState::Quit => { ++ self.quit_now(); ++ return; ++ } ++ } ++ if self.max_depth.map_or(false, |max| depth >= max) { ++ continue; ++ } ++ for result in readdir { ++ if self.run_one(&work.ignore, depth + 1, result).is_quit() { ++ self.quit_now(); ++ return; ++ } ++ } ++ } ++ } ++ ++ /// Runs the worker on a single entry from a directory iterator. ++ /// ++ /// If the entry is a path that should be ignored, then this is a no-op. ++ /// Otherwise, the entry is pushed on to the queue. (The actual execution ++ /// of the callback happens in `run`.) ++ /// ++ /// If an error occurs while reading the entry, then it is sent to the ++ /// caller's callback. ++ /// ++ /// `ig` is the `Ignore` matcher for the parent directory. `depth` should ++ /// be the depth of this entry. `result` should be the item yielded by ++ /// a directory iterator. 
++ fn run_one( ++ &mut self, ++ ig: &Ignore, ++ depth: usize, ++ result: Result, ++ ) -> WalkState { ++ let fs_dent = match result { ++ Ok(fs_dent) => fs_dent, ++ Err(err) => { ++ return (self.f)(Err(Error::from(err).with_depth(depth))); ++ } ++ }; ++ let mut dent = match DirEntryRaw::from_entry(depth, &fs_dent) { ++ Ok(dent) => DirEntry::new_raw(dent, None), ++ Err(err) => { ++ return (self.f)(Err(err)); ++ } ++ }; ++ let is_symlink = dent.file_type().map_or(false, |ft| ft.is_symlink()); ++ if self.follow_links && is_symlink { ++ let path = dent.path().to_path_buf(); ++ dent = match DirEntryRaw::from_link(depth, path) { ++ Ok(dent) => DirEntry::new_raw(dent, None), ++ Err(err) => { ++ return (self.f)(Err(err)); ++ } ++ }; ++ if dent.file_type().map_or(false, |ft| ft.is_dir()) { ++ if let Err(err) = check_symlink_loop(ig, dent.path(), depth) { ++ return (self.f)(Err(err)); ++ } ++ } ++ } ++ let is_dir = dent.file_type().map_or(false, |ft| ft.is_dir()); ++ let max_size = self.max_filesize; ++ let should_skip_path = skip_path(ig, dent.path(), is_dir); ++ let should_skip_filesize = if !is_dir && max_size.is_some() { ++ skip_filesize(max_size.unwrap(), dent.path(), &dent.metadata().ok()) ++ } else { ++ false ++ }; ++ ++ if !should_skip_path && !should_skip_filesize { ++ self.queue.push(Message::Work(Work { ++ dent: dent, ++ ignore: ig.clone(), ++ })); ++ } ++ WalkState::Continue ++ } ++ ++ /// Returns the next directory to descend into. ++ /// ++ /// If all work has been exhausted, then this returns None. The worker ++ /// should then subsequently quit. ++ fn get_work(&mut self) -> Option { ++ loop { ++ if self.is_quit_now() { ++ return None; ++ } ++ match self.queue.try_pop() { ++ Some(Message::Work(work)) => { ++ self.waiting(false); ++ self.quitting(false); ++ return Some(work); ++ } ++ Some(Message::Quit) => { ++ // We can't just quit because a Message::Quit could be ++ // spurious. 
For example, it's possible to observe that ++ // all workers are waiting even if there's more work to ++ // be done. ++ // ++ // Therefore, we do a bit of a dance to wait until all ++ // workers have signaled that they're ready to quit before ++ // actually quitting. ++ // ++ // If the Quit message turns out to be spurious, then the ++ // loop below will break and we'll go back to looking for ++ // more work. ++ self.waiting(true); ++ self.quitting(true); ++ while !self.is_quit_now() { ++ let nwait = self.num_waiting(); ++ let nquit = self.num_quitting(); ++ // If the number of waiting workers dropped, then ++ // abort our attempt to quit. ++ if nwait < self.threads { ++ break; ++ } ++ // If all workers are in this quit loop, then we ++ // can stop. ++ if nquit == self.threads { ++ return None; ++ } ++ // Otherwise, spin. ++ } ++ } ++ None => { ++ self.waiting(true); ++ self.quitting(false); ++ if self.num_waiting() == self.threads { ++ for _ in 0..self.threads { ++ self.queue.push(Message::Quit); ++ } ++ } else { ++ // You're right to consider this suspicious, but it's ++ // a useful heuristic to permit producers to catch up ++ // to consumers without burning the CPU. It is also ++ // useful as a means to prevent burning the CPU if only ++ // one worker is left doing actual work. It's not ++ // perfect and it doesn't leave the CPU completely ++ // idle, but it's not clear what else we can do. :-/ ++ thread::sleep(Duration::from_millis(1)); ++ } ++ } ++ } ++ } ++ } ++ ++ /// Indicates that all workers should quit immediately. ++ fn quit_now(&self) { ++ self.quit_now.store(true, Ordering::SeqCst); ++ } ++ ++ /// Returns true if this worker should quit immediately. ++ fn is_quit_now(&self) -> bool { ++ self.quit_now.load(Ordering::SeqCst) ++ } ++ ++ /// Returns the total number of workers waiting for work. ++ fn num_waiting(&self) -> usize { ++ self.num_waiting.load(Ordering::SeqCst) ++ } ++ ++ /// Returns the total number of workers ready to quit. 
++ fn num_quitting(&self) -> usize { ++ self.num_quitting.load(Ordering::SeqCst) ++ } ++ ++ /// Sets this worker's "quitting" state to the value of `yes`. ++ fn quitting(&mut self, yes: bool) { ++ if yes { ++ if !self.is_quitting { ++ self.is_quitting = true; ++ self.num_quitting.fetch_add(1, Ordering::SeqCst); ++ } ++ } else { ++ if self.is_quitting { ++ self.is_quitting = false; ++ self.num_quitting.fetch_sub(1, Ordering::SeqCst); ++ } ++ } ++ } ++ ++ /// Sets this worker's "waiting" state to the value of `yes`. ++ fn waiting(&mut self, yes: bool) { ++ if yes { ++ if !self.is_waiting { ++ self.is_waiting = true; ++ self.num_waiting.fetch_add(1, Ordering::SeqCst); ++ } ++ } else { ++ if self.is_waiting { ++ self.is_waiting = false; ++ self.num_waiting.fetch_sub(1, Ordering::SeqCst); ++ } ++ } ++ } ++} ++ ++fn check_symlink_loop( ++ ig_parent: &Ignore, ++ child_path: &Path, ++ child_depth: usize, ++) -> Result<(), Error> { ++ for ig in ig_parent.parents().take_while(|ig| !ig.is_absolute_parent()) { ++ let same = try!(is_same_file(ig.path(), child_path).map_err(|err| { ++ Error::from(err).with_path(child_path).with_depth(child_depth) ++ })); ++ if same { ++ return Err(Error::Loop { ++ ancestor: ig.path().to_path_buf(), ++ child: child_path.to_path_buf(), ++ }.with_depth(child_depth)); ++ } ++ } ++ Ok(()) ++} ++ ++// Before calling this function, make sure that you ensure that is really ++// necessary as the arguments imply a file stat. 
++fn skip_filesize( ++ max_filesize: u64, ++ path: &Path, ++ ent: &Option ++) -> bool { ++ let filesize = match *ent { ++ Some(ref md) => Some(md.len()), ++ None => None ++ }; ++ ++ if let Some(fs) = filesize { ++ if fs > max_filesize { ++ debug!("ignoring {}: {} bytes", path.display(), fs); ++ true ++ } else { ++ false ++ } ++ } else { ++ false ++ } ++} ++ ++fn skip_path(ig: &Ignore, path: &Path, is_dir: bool) -> bool { ++ let m = ig.matched(path, is_dir); ++ if m.is_ignore() { ++ debug!("ignoring {}: {:?}", path.display(), m); ++ true ++ } else if m.is_whitelist() { ++ debug!("whitelisting {}: {:?}", path.display(), m); ++ false ++ } else { ++ false ++ } ++} ++ ++#[cfg(test)] ++mod tests { ++ use std::fs::{self, File}; ++ use std::io::Write; ++ use std::path::Path; ++ use std::sync::{Arc, Mutex}; ++ ++ use tempdir::TempDir; ++ ++ use super::{WalkBuilder, WalkState}; ++ ++ fn wfile>(path: P, contents: &str) { ++ let mut file = File::create(path).unwrap(); ++ file.write_all(contents.as_bytes()).unwrap(); ++ } ++ ++ fn wfile_size>(path: P, size: u64) { ++ let file = File::create(path).unwrap(); ++ file.set_len(size).unwrap(); ++ } ++ ++ #[cfg(unix)] ++ fn symlink, Q: AsRef>(src: P, dst: Q) { ++ use std::os::unix::fs::symlink; ++ symlink(src, dst).unwrap(); ++ } ++ ++ fn mkdirp>(path: P) { ++ fs::create_dir_all(path).unwrap(); ++ } ++ ++ fn normal_path(unix: &str) -> String { ++ if cfg!(windows) { ++ unix.replace("\\", "/") ++ } else { ++ unix.to_string() ++ } ++ } ++ ++ fn walk_collect(prefix: &Path, builder: &WalkBuilder) -> Vec { ++ let mut paths = vec![]; ++ for result in builder.build() { ++ let dent = match result { ++ Err(_) => continue, ++ Ok(dent) => dent, ++ }; ++ let path = dent.path().strip_prefix(prefix).unwrap(); ++ if path.as_os_str().is_empty() { ++ continue; ++ } ++ paths.push(normal_path(path.to_str().unwrap())); ++ } ++ paths.sort(); ++ paths ++ } ++ ++ fn walk_collect_parallel( ++ prefix: &Path, ++ builder: &WalkBuilder, ++ ) -> Vec { ++ let paths 
= Arc::new(Mutex::new(vec![])); ++ let prefix = Arc::new(prefix.to_path_buf()); ++ builder.build_parallel().run(|| { ++ let paths = paths.clone(); ++ let prefix = prefix.clone(); ++ Box::new(move |result| { ++ let dent = match result { ++ Err(_) => return WalkState::Continue, ++ Ok(dent) => dent, ++ }; ++ let path = dent.path().strip_prefix(&**prefix).unwrap(); ++ if path.as_os_str().is_empty() { ++ return WalkState::Continue; ++ } ++ let mut paths = paths.lock().unwrap(); ++ paths.push(normal_path(path.to_str().unwrap())); ++ WalkState::Continue ++ }) ++ }); ++ let mut paths = paths.lock().unwrap(); ++ paths.sort(); ++ paths.to_vec() ++ } ++ ++ fn mkpaths(paths: &[&str]) -> Vec { ++ let mut paths: Vec<_> = paths.iter().map(|s| s.to_string()).collect(); ++ paths.sort(); ++ paths ++ } ++ ++ fn assert_paths( ++ prefix: &Path, ++ builder: &WalkBuilder, ++ expected: &[&str], ++ ) { ++ let got = walk_collect(prefix, builder); ++ assert_eq!(got, mkpaths(expected)); ++ let got = walk_collect_parallel(prefix, builder); ++ assert_eq!(got, mkpaths(expected)); ++ } ++ ++ #[test] ++ fn no_ignores() { ++ let td = TempDir::new("walk-test-").unwrap(); ++ mkdirp(td.path().join("a/b/c")); ++ mkdirp(td.path().join("x/y")); ++ wfile(td.path().join("a/b/foo"), ""); ++ wfile(td.path().join("x/y/foo"), ""); ++ ++ assert_paths(td.path(), &WalkBuilder::new(td.path()), &[ ++ "x", "x/y", "x/y/foo", "a", "a/b", "a/b/foo", "a/b/c", ++ ]); ++ } ++ ++ #[test] ++ fn gitignore() { ++ let td = TempDir::new("walk-test-").unwrap(); ++ mkdirp(td.path().join("a")); ++ wfile(td.path().join(".gitignore"), "foo"); ++ wfile(td.path().join("foo"), ""); ++ wfile(td.path().join("a/foo"), ""); ++ wfile(td.path().join("bar"), ""); ++ wfile(td.path().join("a/bar"), ""); ++ ++ assert_paths(td.path(), &WalkBuilder::new(td.path()), &[ ++ "bar", "a", "a/bar", ++ ]); ++ } ++ ++ #[test] ++ fn explicit_ignore() { ++ let td = TempDir::new("walk-test-").unwrap(); ++ let igpath = td.path().join(".not-an-ignore"); ++ 
mkdirp(td.path().join("a")); ++ wfile(&igpath, "foo"); ++ wfile(td.path().join("foo"), ""); ++ wfile(td.path().join("a/foo"), ""); ++ wfile(td.path().join("bar"), ""); ++ wfile(td.path().join("a/bar"), ""); ++ ++ let mut builder = WalkBuilder::new(td.path()); ++ assert!(builder.add_ignore(&igpath).is_none()); ++ assert_paths(td.path(), &builder, &["bar", "a", "a/bar"]); ++ } ++ ++ #[test] ++ fn gitignore_parent() { ++ let td = TempDir::new("walk-test-").unwrap(); ++ mkdirp(td.path().join("a")); ++ wfile(td.path().join(".gitignore"), "foo"); ++ wfile(td.path().join("a/foo"), ""); ++ wfile(td.path().join("a/bar"), ""); ++ ++ let root = td.path().join("a"); ++ assert_paths(&root, &WalkBuilder::new(&root), &["bar"]); ++ } ++ ++ #[test] ++ fn max_depth() { ++ let td = TempDir::new("walk-test-").unwrap(); ++ mkdirp(td.path().join("a/b/c")); ++ wfile(td.path().join("foo"), ""); ++ wfile(td.path().join("a/foo"), ""); ++ wfile(td.path().join("a/b/foo"), ""); ++ wfile(td.path().join("a/b/c/foo"), ""); ++ ++ let mut builder = WalkBuilder::new(td.path()); ++ assert_paths(td.path(), &builder, &[ ++ "a", "a/b", "a/b/c", "foo", "a/foo", "a/b/foo", "a/b/c/foo", ++ ]); ++ assert_paths(td.path(), builder.max_depth(Some(0)), &[]); ++ assert_paths(td.path(), builder.max_depth(Some(1)), &["a", "foo"]); ++ assert_paths(td.path(), builder.max_depth(Some(2)), &[ ++ "a", "a/b", "foo", "a/foo", ++ ]); ++ } ++ ++ #[test] ++ fn max_filesize() { ++ let td = TempDir::new("walk-test-").unwrap(); ++ mkdirp(td.path().join("a/b")); ++ wfile_size(td.path().join("foo"), 0); ++ wfile_size(td.path().join("bar"), 400); ++ wfile_size(td.path().join("baz"), 600); ++ wfile_size(td.path().join("a/foo"), 600); ++ wfile_size(td.path().join("a/bar"), 500); ++ wfile_size(td.path().join("a/baz"), 200); ++ ++ let mut builder = WalkBuilder::new(td.path()); ++ assert_paths(td.path(), &builder, &[ ++ "a", "a/b", "foo", "bar", "baz", "a/foo", "a/bar", "a/baz", ++ ]); ++ assert_paths(td.path(), 
builder.max_filesize(Some(0)), &[ ++ "a", "a/b", "foo" ++ ]); ++ assert_paths(td.path(), builder.max_filesize(Some(500)), &[ ++ "a", "a/b", "foo", "bar", "a/bar", "a/baz" ++ ]); ++ assert_paths(td.path(), builder.max_filesize(Some(50000)), &[ ++ "a", "a/b", "foo", "bar", "baz", "a/foo", "a/bar", "a/baz", ++ ]); ++ } ++ ++ #[cfg(unix)] // because symlinks on windows are weird ++ #[test] ++ fn symlinks() { ++ let td = TempDir::new("walk-test-").unwrap(); ++ mkdirp(td.path().join("a/b")); ++ symlink(td.path().join("a/b"), td.path().join("z")); ++ wfile(td.path().join("a/b/foo"), ""); ++ ++ let mut builder = WalkBuilder::new(td.path()); ++ assert_paths(td.path(), &builder, &[ ++ "a", "a/b", "a/b/foo", "z", ++ ]); ++ assert_paths(td.path(), &builder.follow_links(true), &[ ++ "a", "a/b", "a/b/foo", "z", "z/foo", ++ ]); ++ } ++ ++ #[cfg(unix)] // because symlinks on windows are weird ++ #[test] ++ fn symlink_loop() { ++ let td = TempDir::new("walk-test-").unwrap(); ++ mkdirp(td.path().join("a/b")); ++ symlink(td.path().join("a"), td.path().join("a/b/c")); ++ ++ let mut builder = WalkBuilder::new(td.path()); ++ assert_paths(td.path(), &builder, &[ ++ "a", "a/b", "a/b/c", ++ ]); ++ assert_paths(td.path(), &builder.follow_links(true), &[ ++ "a", "a/b", ++ ]); ++ } ++} diff --cc vendor/ignore-0.2.2/tests/gitignore_matched_path_or_any_parents_tests.gitignore index 000000000,000000000..ac09e12f7 new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/tests/gitignore_matched_path_or_any_parents_tests.gitignore @@@ -1,0 -1,0 +1,216 @@@ ++# Based on https://github.com/behnam/gitignore-test/blob/master/.gitignore ++ ++### file in root ++ ++# MATCH /file_root_1 ++file_root_00 ++ ++# NO_MATCH ++file_root_01/ ++ ++# NO_MATCH ++file_root_02/* ++ ++# NO_MATCH ++file_root_03/** ++ ++ ++# MATCH /file_root_10 ++/file_root_10 ++ ++# NO_MATCH ++/file_root_11/ ++ ++# NO_MATCH ++/file_root_12/* ++ ++# NO_MATCH ++/file_root_13/** ++ ++ ++# NO_MATCH ++*/file_root_20 ++ ++# NO_MATCH 
++*/file_root_21/ ++ ++# NO_MATCH ++*/file_root_22/* ++ ++# NO_MATCH ++*/file_root_23/** ++ ++ ++# MATCH /file_root_30 ++**/file_root_30 ++ ++# NO_MATCH ++**/file_root_31/ ++ ++# NO_MATCH ++**/file_root_32/* ++ ++# NO_MATCH ++**/file_root_33/** ++ ++ ++### file in sub-dir ++ ++# MATCH /parent_dir/file_deep_1 ++file_deep_00 ++ ++# NO_MATCH ++file_deep_01/ ++ ++# NO_MATCH ++file_deep_02/* ++ ++# NO_MATCH ++file_deep_03/** ++ ++ ++# NO_MATCH ++/file_deep_10 ++ ++# NO_MATCH ++/file_deep_11/ ++ ++# NO_MATCH ++/file_deep_12/* ++ ++# NO_MATCH ++/file_deep_13/** ++ ++ ++# MATCH /parent_dir/file_deep_20 ++*/file_deep_20 ++ ++# NO_MATCH ++*/file_deep_21/ ++ ++# NO_MATCH ++*/file_deep_22/* ++ ++# NO_MATCH ++*/file_deep_23/** ++ ++ ++# MATCH /parent_dir/file_deep_30 ++**/file_deep_30 ++ ++# NO_MATCH ++**/file_deep_31/ ++ ++# NO_MATCH ++**/file_deep_32/* ++ ++# NO_MATCH ++**/file_deep_33/** ++ ++ ++### dir in root ++ ++# MATCH /dir_root_00 ++dir_root_00 ++ ++# MATCH /dir_root_01 ++dir_root_01/ ++ ++# MATCH /dir_root_02 ++dir_root_02/* ++ ++# MATCH /dir_root_03 ++dir_root_03/** ++ ++ ++# MATCH /dir_root_10 ++/dir_root_10 ++ ++# MATCH /dir_root_11 ++/dir_root_11/ ++ ++# MATCH /dir_root_12 ++/dir_root_12/* ++ ++# MATCH /dir_root_13 ++/dir_root_13/** ++ ++ ++# NO_MATCH ++*/dir_root_20 ++ ++# NO_MATCH ++*/dir_root_21/ ++ ++# NO_MATCH ++*/dir_root_22/* ++ ++# NO_MATCH ++*/dir_root_23/** ++ ++ ++# MATCH /dir_root_30 ++**/dir_root_30 ++ ++# MATCH /dir_root_31 ++**/dir_root_31/ ++ ++# MATCH /dir_root_32 ++**/dir_root_32/* ++ ++# MATCH /dir_root_33 ++**/dir_root_33/** ++ ++ ++### dir in sub-dir ++ ++# MATCH /parent_dir/dir_deep_00 ++dir_deep_00 ++ ++# MATCH /parent_dir/dir_deep_01 ++dir_deep_01/ ++ ++# NO_MATCH ++dir_deep_02/* ++ ++# NO_MATCH ++dir_deep_03/** ++ ++ ++# NO_MATCH ++/dir_deep_10 ++ ++# NO_MATCH ++/dir_deep_11/ ++ ++# NO_MATCH ++/dir_deep_12/* ++ ++# NO_MATCH ++/dir_deep_13/** ++ ++ ++# MATCH /parent_dir/dir_deep_20 ++*/dir_deep_20 ++ ++# MATCH /parent_dir/dir_deep_21 
++*/dir_deep_21/ ++ ++# MATCH /parent_dir/dir_deep_22 ++*/dir_deep_22/* ++ ++# MATCH /parent_dir/dir_deep_23 ++*/dir_deep_23/** ++ ++ ++# MATCH /parent_dir/dir_deep_30 ++**/dir_deep_30 ++ ++# MATCH /parent_dir/dir_deep_31 ++**/dir_deep_31/ ++ ++# MATCH /parent_dir/dir_deep_32 ++**/dir_deep_32/* ++ ++# MATCH /parent_dir/dir_deep_33 ++**/dir_deep_33/** diff --cc vendor/ignore-0.2.2/tests/gitignore_matched_path_or_any_parents_tests.rs index 000000000,000000000..c76ee2d1f new file mode 100644 --- /dev/null +++ b/vendor/ignore-0.2.2/tests/gitignore_matched_path_or_any_parents_tests.rs @@@ -1,0 -1,0 +1,297 @@@ ++extern crate ignore; ++ ++ ++use std::path::Path; ++ ++use ignore::gitignore::{Gitignore, GitignoreBuilder}; ++ ++ ++const IGNORE_FILE: &'static str = "tests/gitignore_matched_path_or_any_parents_tests.gitignore"; ++ ++ ++fn get_gitignore() -> Gitignore { ++ let mut builder = GitignoreBuilder::new("ROOT"); ++ let error = builder.add(IGNORE_FILE); ++ assert!(error.is_none(), "failed to open gitignore file"); ++ builder.build().unwrap() ++} ++ ++ ++#[test] ++#[should_panic(expected = "path is expect to be under the root")] ++fn test_path_should_be_under_root() { ++ let gitignore = get_gitignore(); ++ let path = "/tmp/some_file"; ++ gitignore.matched_path_or_any_parents(Path::new(path), false); ++ assert!(false); ++} ++ ++ ++#[test] ++fn test_files_in_root() { ++ let gitignore = get_gitignore(); ++ let m = |path: &str| gitignore.matched_path_or_any_parents(Path::new(path), false); ++ ++ // 0x ++ assert!(m("ROOT/file_root_00").is_ignore()); ++ assert!(m("ROOT/file_root_01").is_none()); ++ assert!(m("ROOT/file_root_02").is_none()); ++ assert!(m("ROOT/file_root_03").is_none()); ++ ++ // 1x ++ assert!(m("ROOT/file_root_10").is_ignore()); ++ assert!(m("ROOT/file_root_11").is_none()); ++ assert!(m("ROOT/file_root_12").is_none()); ++ assert!(m("ROOT/file_root_13").is_none()); ++ ++ // 2x ++ assert!(m("ROOT/file_root_20").is_none()); ++ 
assert!(m("ROOT/file_root_21").is_none()); ++ assert!(m("ROOT/file_root_22").is_none()); ++ assert!(m("ROOT/file_root_23").is_none()); ++ ++ // 3x ++ assert!(m("ROOT/file_root_30").is_ignore()); ++ assert!(m("ROOT/file_root_31").is_none()); ++ assert!(m("ROOT/file_root_32").is_none()); ++ assert!(m("ROOT/file_root_33").is_none()); ++} ++ ++ ++#[test] ++fn test_files_in_deep() { ++ let gitignore = get_gitignore(); ++ let m = |path: &str| gitignore.matched_path_or_any_parents(Path::new(path), false); ++ ++ // 0x ++ assert!(m("ROOT/parent_dir/file_deep_00").is_ignore()); ++ assert!(m("ROOT/parent_dir/file_deep_01").is_none()); ++ assert!(m("ROOT/parent_dir/file_deep_02").is_none()); ++ assert!(m("ROOT/parent_dir/file_deep_03").is_none()); ++ ++ // 1x ++ assert!(m("ROOT/parent_dir/file_deep_10").is_none()); ++ assert!(m("ROOT/parent_dir/file_deep_11").is_none()); ++ assert!(m("ROOT/parent_dir/file_deep_12").is_none()); ++ assert!(m("ROOT/parent_dir/file_deep_13").is_none()); ++ ++ // 2x ++ assert!(m("ROOT/parent_dir/file_deep_20").is_ignore()); ++ assert!(m("ROOT/parent_dir/file_deep_21").is_none()); ++ assert!(m("ROOT/parent_dir/file_deep_22").is_none()); ++ assert!(m("ROOT/parent_dir/file_deep_23").is_none()); ++ ++ // 3x ++ assert!(m("ROOT/parent_dir/file_deep_30").is_ignore()); ++ assert!(m("ROOT/parent_dir/file_deep_31").is_none()); ++ assert!(m("ROOT/parent_dir/file_deep_32").is_none()); ++ assert!(m("ROOT/parent_dir/file_deep_33").is_none()); ++} ++ ++ ++#[test] ++fn test_dirs_in_root() { ++ let gitignore = get_gitignore(); ++ let m = ++ |path: &str, is_dir: bool| gitignore.matched_path_or_any_parents(Path::new(path), is_dir); ++ ++ // 00 ++ assert!(m("ROOT/dir_root_00", true).is_ignore()); ++ assert!(m("ROOT/dir_root_00/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_00/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_00/child_dir/file", false).is_ignore()); ++ ++ // 01 ++ assert!(m("ROOT/dir_root_01", true).is_ignore()); ++ 
assert!(m("ROOT/dir_root_01/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_01/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_01/child_dir/file", false).is_ignore()); ++ ++ // 02 ++ assert!(m("ROOT/dir_root_02", true).is_none()); // dir itself doesn't match ++ assert!(m("ROOT/dir_root_02/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_02/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_02/child_dir/file", false).is_ignore()); ++ ++ // 03 ++ assert!(m("ROOT/dir_root_03", true).is_none()); // dir itself doesn't match ++ assert!(m("ROOT/dir_root_03/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_03/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_03/child_dir/file", false).is_ignore()); ++ ++ // 10 ++ assert!(m("ROOT/dir_root_10", true).is_ignore()); ++ assert!(m("ROOT/dir_root_10/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_10/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_10/child_dir/file", false).is_ignore()); ++ ++ // 11 ++ assert!(m("ROOT/dir_root_11", true).is_ignore()); ++ assert!(m("ROOT/dir_root_11/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_11/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_11/child_dir/file", false).is_ignore()); ++ ++ // 12 ++ assert!(m("ROOT/dir_root_12", true).is_none()); // dir itself doesn't match ++ assert!(m("ROOT/dir_root_12/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_12/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_12/child_dir/file", false).is_ignore()); ++ ++ // 13 ++ assert!(m("ROOT/dir_root_13", true).is_none()); ++ assert!(m("ROOT/dir_root_13/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_13/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_13/child_dir/file", false).is_ignore()); ++ ++ // 20 ++ assert!(m("ROOT/dir_root_20", true).is_none()); ++ assert!(m("ROOT/dir_root_20/file", false).is_none()); ++ assert!(m("ROOT/dir_root_20/child_dir", true).is_none()); ++ 
assert!(m("ROOT/dir_root_20/child_dir/file", false).is_none()); ++ ++ // 21 ++ assert!(m("ROOT/dir_root_21", true).is_none()); ++ assert!(m("ROOT/dir_root_21/file", false).is_none()); ++ assert!(m("ROOT/dir_root_21/child_dir", true).is_none()); ++ assert!(m("ROOT/dir_root_21/child_dir/file", false).is_none()); ++ ++ // 22 ++ assert!(m("ROOT/dir_root_22", true).is_none()); ++ assert!(m("ROOT/dir_root_22/file", false).is_none()); ++ assert!(m("ROOT/dir_root_22/child_dir", true).is_none()); ++ assert!(m("ROOT/dir_root_22/child_dir/file", false).is_none()); ++ ++ // 23 ++ assert!(m("ROOT/dir_root_23", true).is_none()); ++ assert!(m("ROOT/dir_root_23/file", false).is_none()); ++ assert!(m("ROOT/dir_root_23/child_dir", true).is_none()); ++ assert!(m("ROOT/dir_root_23/child_dir/file", false).is_none()); ++ ++ // 30 ++ assert!(m("ROOT/dir_root_30", true).is_ignore()); ++ assert!(m("ROOT/dir_root_30/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_30/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_30/child_dir/file", false).is_ignore()); ++ ++ // 31 ++ assert!(m("ROOT/dir_root_31", true).is_ignore()); ++ assert!(m("ROOT/dir_root_31/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_31/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_31/child_dir/file", false).is_ignore()); ++ ++ // 32 ++ assert!(m("ROOT/dir_root_32", true).is_none()); // dir itself doesn't match ++ assert!(m("ROOT/dir_root_32/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_32/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_32/child_dir/file", false).is_ignore()); ++ ++ // 33 ++ assert!(m("ROOT/dir_root_33", true).is_none()); // dir itself doesn't match ++ assert!(m("ROOT/dir_root_33/file", false).is_ignore()); ++ assert!(m("ROOT/dir_root_33/child_dir", true).is_ignore()); ++ assert!(m("ROOT/dir_root_33/child_dir/file", false).is_ignore()); ++} ++ ++ ++#[test] ++fn test_dirs_in_deep() { ++ let gitignore = get_gitignore(); ++ let m = ++ |path: &str, 
is_dir: bool| gitignore.matched_path_or_any_parents(Path::new(path), is_dir); ++ ++ // 00 ++ assert!(m("ROOT/parent_dir/dir_deep_00", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_00/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_00/child_dir", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_00/child_dir/file", false).is_ignore()); ++ ++ // 01 ++ assert!(m("ROOT/parent_dir/dir_deep_01", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_01/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_01/child_dir", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_01/child_dir/file", false).is_ignore()); ++ ++ // 02 ++ assert!(m("ROOT/parent_dir/dir_deep_02", true).is_none()); // dir itself doesn't match ++ assert!(m("ROOT/parent_dir/dir_deep_02/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_02/child_dir", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_02/child_dir/file", false).is_ignore()); ++ ++ // 03 ++ assert!(m("ROOT/parent_dir/dir_deep_03", true).is_none()); // dir itself doesn't match ++ assert!(m("ROOT/parent_dir/dir_deep_03/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_03/child_dir", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_03/child_dir/file", false).is_ignore()); ++ ++ // 10 ++ assert!(m("ROOT/parent_dir/dir_deep_10", true).is_none()); ++ assert!(m("ROOT/parent_dir/dir_deep_10/file", false).is_none()); ++ assert!(m("ROOT/parent_dir/dir_deep_10/child_dir", true).is_none()); ++ assert!(m("ROOT/parent_dir/dir_deep_10/child_dir/file", false).is_none()); ++ ++ // 11 ++ assert!(m("ROOT/parent_dir/dir_deep_11", true).is_none()); ++ assert!(m("ROOT/parent_dir/dir_deep_11/file", false).is_none()); ++ assert!(m("ROOT/parent_dir/dir_deep_11/child_dir", true).is_none()); ++ assert!(m("ROOT/parent_dir/dir_deep_11/child_dir/file", false).is_none()); ++ ++ // 12 ++ assert!(m("ROOT/parent_dir/dir_deep_12", true).is_none()); ++ 
assert!(m("ROOT/parent_dir/dir_deep_12/file", false).is_none()); ++ assert!(m("ROOT/parent_dir/dir_deep_12/child_dir", true).is_none()); ++ assert!(m("ROOT/parent_dir/dir_deep_12/child_dir/file", false).is_none()); ++ ++ // 13 ++ assert!(m("ROOT/parent_dir/dir_deep_13", true).is_none()); ++ assert!(m("ROOT/parent_dir/dir_deep_13/file", false).is_none()); ++ assert!(m("ROOT/parent_dir/dir_deep_13/child_dir", true).is_none()); ++ assert!(m("ROOT/parent_dir/dir_deep_13/child_dir/file", false).is_none()); ++ ++ // 20 ++ assert!(m("ROOT/parent_dir/dir_deep_20", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_20/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_20/child_dir", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_20/child_dir/file", false).is_ignore()); ++ ++ // 21 ++ assert!(m("ROOT/parent_dir/dir_deep_21", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_21/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_21/child_dir", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_21/child_dir/file", false).is_ignore()); ++ ++ // 22 ++ assert!(m("ROOT/parent_dir/dir_deep_22", true).is_none()); // dir itself doesn't match ++ assert!(m("ROOT/parent_dir/dir_deep_22/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_22/child_dir", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_22/child_dir/file", false).is_ignore()); ++ ++ // 23 ++ assert!(m("ROOT/parent_dir/dir_deep_23", true).is_none()); // dir itself doesn't match ++ assert!(m("ROOT/parent_dir/dir_deep_23/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_23/child_dir", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_23/child_dir/file", false).is_ignore()); ++ ++ // 30 ++ assert!(m("ROOT/parent_dir/dir_deep_30", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_30/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_30/child_dir", true).is_ignore()); ++ 
assert!(m("ROOT/parent_dir/dir_deep_30/child_dir/file", false).is_ignore()); ++ ++ // 31 ++ assert!(m("ROOT/parent_dir/dir_deep_31", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_31/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_31/child_dir", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_31/child_dir/file", false).is_ignore()); ++ ++ // 32 ++ assert!(m("ROOT/parent_dir/dir_deep_32", true).is_none()); // dir itself doesn't match ++ assert!(m("ROOT/parent_dir/dir_deep_32/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_32/child_dir", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_32/child_dir/file", false).is_ignore()); ++ ++ // 33 ++ assert!(m("ROOT/parent_dir/dir_deep_33", true).is_none()); // dir itself doesn't match ++ assert!(m("ROOT/parent_dir/dir_deep_33/file", false).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_33/child_dir", true).is_ignore()); ++ assert!(m("ROOT/parent_dir/dir_deep_33/child_dir/file", false).is_ignore()); ++} diff --cc vendor/libc-0.2.28/.cargo-checksum.json index 000000000,000000000..d49603eaf new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/.cargo-checksum.json @@@ -1,0 -1,0 +1,1 @@@ ++{"files":{},"package":"bb7b49972ee23d8aa1026c365a5b440ba08e35075f18c459980c7395c221ec48"} diff --cc vendor/libc-0.2.28/.cargo-ok index 000000000,000000000..e69de29bb new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/.cargo-ok diff --cc vendor/libc-0.2.28/.travis.yml index 000000000,000000000..4a3979794 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/.travis.yml @@@ -1,0 -1,0 +1,91 @@@ ++language: rust ++rust: stable ++sudo: required ++dist: trusty ++services: ++ - docker ++install: ++ - if [ -z "$NO_ADD" ]; then rustup target add $TARGET; fi ++script: ++ - cargo build ++ - cargo build --no-default-features ++ - cargo generate-lockfile --manifest-path libc-test/Cargo.toml ++ - if [[ $TRAVIS_OS_NAME = "linux" ]]; then ++ sh ci/run-docker.sh 
$TARGET; ++ else ++ export CARGO_TARGET_DIR=`pwd`/target; ++ sh ci/run.sh $TARGET; ++ fi ++ - rustc ci/style.rs && ./style src ++env: ++ global: ++ secure: "e2/3QjgRN9atOuSHp22TrYG7QVKcYUWY48Hi9b60w+r1+BhPkTseIJLte7WefRhdXtqpjjUJTooKDhnurFOeHaCT+nmBgiv+FPU893sBl4bhesY4m0vgUJVbNZcs6lTImYekWVb+aqjGdgV/XAgCw7c3kPmrZV0MzGDWL64Xaps=" ++matrix: ++ include: ++ # 1.0.0 compat ++ - env: TARGET=x86_64-unknown-linux-gnu NO_ADD=1 ++ rust: 1.0.0 ++ script: cargo build ++ install: ++ ++ # build documentation ++ - env: TARGET=x86_64-unknown-linux-gnu NO_ADD=1 ++ rust: nightly ++ script: sh ci/dox.sh ++ ++ # stable compat ++ - env: TARGET=x86_64-unknown-linux-gnu NO_ADD=1 ++ - env: TARGET=i686-unknown-linux-gnu ++ - os: osx ++ env: TARGET=x86_64-apple-darwin NO_ADD=1 ++ - os: osx ++ env: TARGET=i686-apple-darwin ++ - env: TARGET=arm-linux-androideabi ++ - env: TARGET=aarch64-linux-android ++ - env: TARGET=i686-linux-android ++ - env: TARGET=x86_64-linux-android ++ - env: TARGET=x86_64-unknown-linux-musl ++ - env: TARGET=i686-unknown-linux-musl ++ - env: TARGET=arm-unknown-linux-gnueabihf ++ - env: TARGET=aarch64-unknown-linux-gnu ++ - os: osx ++ osx_image: xcode8.2 ++ env: TARGET=i386-apple-ios ++ - os: osx ++ osx_image: xcode8.2 ++ env: TARGET=x86_64-apple-ios ++ - env: TARGET=x86_64-rumprun-netbsd ++ - env: TARGET=powerpc-unknown-linux-gnu ++ - env: TARGET=powerpc64-unknown-linux-gnu ++ - env: TARGET=mips-unknown-linux-musl ++ - env: TARGET=mipsel-unknown-linux-musl ++ - env: TARGET=mips64-unknown-linux-gnuabi64 ++ - env: TARGET=mips-unknown-linux-gnu ++ - env: TARGET=s390x-unknown-linux-gnu ++ ++ # beta ++ - env: TARGET=x86_64-unknown-linux-gnu NO_ADD=1 ++ rust: beta ++ - os: osx ++ env: TARGET=x86_64-apple-darwin NO_ADD=1 ++ rust: beta ++ ++ # nightly ++ - env: TARGET=x86_64-unknown-linux-gnu NO_ADD=1 ++ rust: nightly ++ - os: osx ++ env: TARGET=x86_64-apple-darwin NO_ADD=1 ++ rust: nightly ++ ++ # QEMU based targets that compile in an emulator ++ - env: 
TARGET=x86_64-unknown-freebsd ++ - env: TARGET=x86_64-unknown-openbsd QEMU=openbsd.qcow2 ++ script: sh ci/run-docker.sh $TARGET ++ install: ++ ++cache: cargo ++ ++notifications: ++ email: ++ on_success: never ++ webhooks: https://buildbot.rust-lang.org/homu/travis diff --cc vendor/libc-0.2.28/Cargo.toml index 000000000,000000000..63d4b6465 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/Cargo.toml @@@ -1,0 -1,0 +1,26 @@@ ++# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO ++# ++# When uploading crates to the registry Cargo will automatically ++# "normalize" Cargo.toml files for maximal compatibility ++# with all versions of Cargo and also rewrite `path` dependencies ++# to registry (e.g. crates.io) dependencies ++# ++# If you believe there's an error in this file please file an ++# issue against the rust-lang/cargo repository. If you're ++# editing this file be aware that the upstream Cargo.toml ++# will likely look very different (and much more reasonable) ++ ++[package] ++name = "libc" ++version = "0.2.28" ++authors = ["The Rust Project Developers"] ++description = "A library for types and bindings to native C functions often found in libc or\nother common platform libraries.\n" ++homepage = "https://github.com/rust-lang/libc" ++documentation = "http://doc.rust-lang.org/libc" ++readme = "README.md" ++license = "MIT/Apache-2.0" ++repository = "https://github.com/rust-lang/libc" ++ ++[features] ++default = ["use_std"] ++use_std = [] diff --cc vendor/libc-0.2.28/LICENSE-APACHE index 000000000,000000000..16fe87b06 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/LICENSE-APACHE @@@ -1,0 -1,0 +1,201 @@@ ++ Apache License ++ Version 2.0, January 2004 ++ http://www.apache.org/licenses/ ++ ++TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION ++ ++1. Definitions. ++ ++ "License" shall mean the terms and conditions for use, reproduction, ++ and distribution as defined by Sections 1 through 9 of this document. 
++ ++ "Licensor" shall mean the copyright owner or entity authorized by ++ the copyright owner that is granting the License. ++ ++ "Legal Entity" shall mean the union of the acting entity and all ++ other entities that control, are controlled by, or are under common ++ control with that entity. For the purposes of this definition, ++ "control" means (i) the power, direct or indirect, to cause the ++ direction or management of such entity, whether by contract or ++ otherwise, or (ii) ownership of fifty percent (50%) or more of the ++ outstanding shares, or (iii) beneficial ownership of such entity. ++ ++ "You" (or "Your") shall mean an individual or Legal Entity ++ exercising permissions granted by this License. ++ ++ "Source" form shall mean the preferred form for making modifications, ++ including but not limited to software source code, documentation ++ source, and configuration files. ++ ++ "Object" form shall mean any form resulting from mechanical ++ transformation or translation of a Source form, including but ++ not limited to compiled object code, generated documentation, ++ and conversions to other media types. ++ ++ "Work" shall mean the work of authorship, whether in Source or ++ Object form, made available under the License, as indicated by a ++ copyright notice that is included in or attached to the work ++ (an example is provided in the Appendix below). ++ ++ "Derivative Works" shall mean any work, whether in Source or Object ++ form, that is based on (or derived from) the Work and for which the ++ editorial revisions, annotations, elaborations, or other modifications ++ represent, as a whole, an original work of authorship. For the purposes ++ of this License, Derivative Works shall not include works that remain ++ separable from, or merely link (or bind by name) to the interfaces of, ++ the Work and Derivative Works thereof. 
++ ++ "Contribution" shall mean any work of authorship, including ++ the original version of the Work and any modifications or additions ++ to that Work or Derivative Works thereof, that is intentionally ++ submitted to Licensor for inclusion in the Work by the copyright owner ++ or by an individual or Legal Entity authorized to submit on behalf of ++ the copyright owner. For the purposes of this definition, "submitted" ++ means any form of electronic, verbal, or written communication sent ++ to the Licensor or its representatives, including but not limited to ++ communication on electronic mailing lists, source code control systems, ++ and issue tracking systems that are managed by, or on behalf of, the ++ Licensor for the purpose of discussing and improving the Work, but ++ excluding communication that is conspicuously marked or otherwise ++ designated in writing by the copyright owner as "Not a Contribution." ++ ++ "Contributor" shall mean Licensor and any individual or Legal Entity ++ on behalf of whom a Contribution has been received by Licensor and ++ subsequently incorporated within the Work. ++ ++2. Grant of Copyright License. Subject to the terms and conditions of ++ this License, each Contributor hereby grants to You a perpetual, ++ worldwide, non-exclusive, no-charge, royalty-free, irrevocable ++ copyright license to reproduce, prepare Derivative Works of, ++ publicly display, publicly perform, sublicense, and distribute the ++ Work and such Derivative Works in Source or Object form. ++ ++3. Grant of Patent License. 
Subject to the terms and conditions of ++ this License, each Contributor hereby grants to You a perpetual, ++ worldwide, non-exclusive, no-charge, royalty-free, irrevocable ++ (except as stated in this section) patent license to make, have made, ++ use, offer to sell, sell, import, and otherwise transfer the Work, ++ where such license applies only to those patent claims licensable ++ by such Contributor that are necessarily infringed by their ++ Contribution(s) alone or by combination of their Contribution(s) ++ with the Work to which such Contribution(s) was submitted. If You ++ institute patent litigation against any entity (including a ++ cross-claim or counterclaim in a lawsuit) alleging that the Work ++ or a Contribution incorporated within the Work constitutes direct ++ or contributory patent infringement, then any patent licenses ++ granted to You under this License for that Work shall terminate ++ as of the date such litigation is filed. ++ ++4. Redistribution. You may reproduce and distribute copies of the ++ Work or Derivative Works thereof in any medium, with or without ++ modifications, and in Source or Object form, provided that You ++ meet the following conditions: ++ ++ (a) You must give any other recipients of the Work or ++ Derivative Works a copy of this License; and ++ ++ (b) You must cause any modified files to carry prominent notices ++ stating that You changed the files; and ++ ++ (c) You must retain, in the Source form of any Derivative Works ++ that You distribute, all copyright, patent, trademark, and ++ attribution notices from the Source form of the Work, ++ excluding those notices that do not pertain to any part of ++ the Derivative Works; and ++ ++ (d) If the Work includes a "NOTICE" text file as part of its ++ distribution, then any Derivative Works that You distribute must ++ include a readable copy of the attribution notices contained ++ within such NOTICE file, excluding those notices that do not ++ pertain to any part of the 
Derivative Works, in at least one ++ of the following places: within a NOTICE text file distributed ++ as part of the Derivative Works; within the Source form or ++ documentation, if provided along with the Derivative Works; or, ++ within a display generated by the Derivative Works, if and ++ wherever such third-party notices normally appear. The contents ++ of the NOTICE file are for informational purposes only and ++ do not modify the License. You may add Your own attribution ++ notices within Derivative Works that You distribute, alongside ++ or as an addendum to the NOTICE text from the Work, provided ++ that such additional attribution notices cannot be construed ++ as modifying the License. ++ ++ You may add Your own copyright statement to Your modifications and ++ may provide additional or different license terms and conditions ++ for use, reproduction, or distribution of Your modifications, or ++ for any such Derivative Works as a whole, provided Your use, ++ reproduction, and distribution of the Work otherwise complies with ++ the conditions stated in this License. ++ ++5. Submission of Contributions. Unless You explicitly state otherwise, ++ any Contribution intentionally submitted for inclusion in the Work ++ by You to the Licensor shall be under the terms and conditions of ++ this License, without any additional terms or conditions. ++ Notwithstanding the above, nothing herein shall supersede or modify ++ the terms of any separate license agreement you may have executed ++ with Licensor regarding such Contributions. ++ ++6. Trademarks. This License does not grant permission to use the trade ++ names, trademarks, service marks, or product names of the Licensor, ++ except as required for reasonable and customary use in describing the ++ origin of the Work and reproducing the content of the NOTICE file. ++ ++7. Disclaimer of Warranty. 
Unless required by applicable law or ++ agreed to in writing, Licensor provides the Work (and each ++ Contributor provides its Contributions) on an "AS IS" BASIS, ++ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or ++ implied, including, without limitation, any warranties or conditions ++ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A ++ PARTICULAR PURPOSE. You are solely responsible for determining the ++ appropriateness of using or redistributing the Work and assume any ++ risks associated with Your exercise of permissions under this License. ++ ++8. Limitation of Liability. In no event and under no legal theory, ++ whether in tort (including negligence), contract, or otherwise, ++ unless required by applicable law (such as deliberate and grossly ++ negligent acts) or agreed to in writing, shall any Contributor be ++ liable to You for damages, including any direct, indirect, special, ++ incidental, or consequential damages of any character arising as a ++ result of this License or out of the use or inability to use the ++ Work (including but not limited to damages for loss of goodwill, ++ work stoppage, computer failure or malfunction, or any and all ++ other commercial damages or losses), even if such Contributor ++ has been advised of the possibility of such damages. ++ ++9. Accepting Warranty or Additional Liability. While redistributing ++ the Work or Derivative Works thereof, You may choose to offer, ++ and charge a fee for, acceptance of support, warranty, indemnity, ++ or other liability obligations and/or rights consistent with this ++ License. 
However, in accepting such obligations, You may act only ++ on Your own behalf and on Your sole responsibility, not on behalf ++ of any other Contributor, and only if You agree to indemnify, ++ defend, and hold each Contributor harmless for any liability ++ incurred by, or claims asserted against, such Contributor by reason ++ of your accepting any such warranty or additional liability. ++ ++END OF TERMS AND CONDITIONS ++ ++APPENDIX: How to apply the Apache License to your work. ++ ++ To apply the Apache License to your work, attach the following ++ boilerplate notice, with the fields enclosed by brackets "[]" ++ replaced with your own identifying information. (Don't include ++ the brackets!) The text should be enclosed in the appropriate ++ comment syntax for the file format. We also recommend that a ++ file or class name and description of purpose be included on the ++ same "printed page" as the copyright notice for easier ++ identification within third-party archives. ++ ++Copyright [yyyy] [name of copyright owner] ++ ++Licensed under the Apache License, Version 2.0 (the "License"); ++you may not use this file except in compliance with the License. ++You may obtain a copy of the License at ++ ++ http://www.apache.org/licenses/LICENSE-2.0 ++ ++Unless required by applicable law or agreed to in writing, software ++distributed under the License is distributed on an "AS IS" BASIS, ++WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ++See the License for the specific language governing permissions and ++limitations under the License. 
diff --cc vendor/libc-0.2.28/LICENSE-MIT index 000000000,000000000..39d4bdb5a new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/LICENSE-MIT @@@ -1,0 -1,0 +1,25 @@@ ++Copyright (c) 2014 The Rust Project Developers ++ ++Permission is hereby granted, free of charge, to any ++person obtaining a copy of this software and associated ++documentation files (the "Software"), to deal in the ++Software without restriction, including without ++limitation the rights to use, copy, modify, merge, ++publish, distribute, sublicense, and/or sell copies of ++the Software, and to permit persons to whom the Software ++is furnished to do so, subject to the following ++conditions: ++ ++The above copyright notice and this permission notice ++shall be included in all copies or substantial portions ++of the Software. ++ ++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ++ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED ++TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A ++PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT ++SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY ++CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION ++OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR ++IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER ++DEALINGS IN THE SOFTWARE. diff --cc vendor/libc-0.2.28/README.md index 000000000,000000000..67535d211 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/README.md @@@ -1,0 -1,0 +1,156 @@@ ++libc ++==== ++ ++A Rust library with native bindings to the types and functions commonly found on ++various systems, including libc. 
++ ++[![Build Status](https://travis-ci.org/rust-lang/libc.svg?branch=master)](https://travis-ci.org/rust-lang/libc) ++[![Build status](https://ci.appveyor.com/api/projects/status/github/rust-lang/libc?svg=true)](https://ci.appveyor.com/project/rust-lang-libs/libc) ++ ++[Documentation](#platforms-and-documentation) ++ ++## Usage ++ ++First, add the following to your `Cargo.toml`: ++ ++```toml ++[dependencies] ++libc = "0.2" ++``` ++ ++Next, add this to your crate root: ++ ++```rust ++extern crate libc; ++``` ++ ++Currently libc by default links to the standard library, but if you would ++instead like to use libc in a `#![no_std]` situation or crate you can request ++this via: ++ ++```toml ++[dependencies] ++libc = { version = "0.2", default-features = false } ++``` ++ ++## What is libc? ++ ++The primary purpose of this crate is to provide all of the definitions necessary ++to easily interoperate with C code (or "C-like" code) on each of the platforms ++that Rust supports. This includes type definitions (e.g. `c_int`), constants ++(e.g. `EINVAL`) as well as function headers (e.g. `malloc`). ++ ++This crate does not strive to have any form of compatibility across platforms, ++but rather it is simply a straight binding to the system libraries on the ++platform in question. ++ ++## Public API ++ ++This crate exports all underlying platform types, functions, and constants under ++the crate root, so all items are accessible as `libc::foo`. The types and values ++of all the exported APIs match the platform that libc is compiled for. ++ ++More detailed information about the design of this library can be found in its ++[associated RFC][rfc]. ++ ++[rfc]: https://github.com/rust-lang/rfcs/blob/master/text/1291-promote-libc.md ++ ++## Adding an API ++ ++Want to use an API which currently isn't bound in `libc`? It's quite easy to add ++one! 
++ ++The internal structure of this crate is designed to minimize the number of ++`#[cfg]` attributes in order to easily be able to add new items which apply ++to all platforms in the future. As a result, the crate is organized ++hierarchically based on platform. Each module has a number of `#[cfg]`'d ++children, but only one is ever actually compiled. Each module then reexports all ++the contents of its children. ++ ++This means that for each platform that libc supports, the path from a ++leaf module to the root will contain all bindings for the platform in question. ++Consequently, this indicates where an API should be added! Adding an API at a ++particular level in the hierarchy means that it is supported on all the child ++platforms of that level. For example, when adding a Unix API it should be added ++to `src/unix/mod.rs`, but when adding a Linux-only API it should be added to ++`src/unix/notbsd/linux/mod.rs`. ++ ++If you're not 100% sure at what level of the hierarchy an API should be added ++at, fear not! This crate has CI support which tests any binding against all ++platforms supported, so you'll see failures if an API is added at the wrong ++level or has different signatures across platforms. ++ ++With that in mind, the steps for adding a new API are: ++ ++1. Determine where in the module hierarchy your API should be added. ++2. Add the API. ++3. Send a PR to this repo. ++4. Wait for CI to pass, fixing errors. ++5. Wait for a merge! ++ ++### Test before you commit ++ ++We have two automated tests running on [Travis](https://travis-ci.org/rust-lang/libc): ++ ++1. [`libc-test`](https://github.com/alexcrichton/ctest) ++ - `cd libc-test && cargo run` ++ - Use the `skip_*()` functions in `build.rs` if you really need a workaround. ++2. 
Style checker ++ - `rustc ci/style.rs && ./style src` ++ ++### Releasing your change to crates.io ++ ++Now that you've done the amazing job of landing your new API or your new ++platform in this crate, the next step is to get that sweet, sweet usage from ++crates.io! The only next step is to bump the version of libc and then publish ++it. If you'd like to get a release out ASAP you can follow these steps: ++ ++1. Update the version number in `Cargo.toml`, you'll just be bumping the patch ++ version number. ++2. Run `cargo update` to regenerate the lockfile to encode your version bump in ++ the lock file. You may pull in some other updated dependencies, that's ok. ++3. Send a PR to this repository. It should [look like this][example], but it'd ++ also be nice to fill out the description with a small rationale for the ++ release (any rationale is ok though!) ++4. Once merged the release will be tagged and published by one of the libc crate ++ maintainers. ++ ++[example]: https://github.com/rust-lang/libc/pull/583 ++ ++## Platforms and Documentation ++ ++The following platforms are currently tested and have documentation available: ++ ++Tested: ++ * [`i686-pc-windows-msvc`](https://doc.rust-lang.org/libc/i686-pc-windows-msvc/libc/) ++ * [`x86_64-pc-windows-msvc`](https://doc.rust-lang.org/libc/x86_64-pc-windows-msvc/libc/) ++ (Windows) ++ * [`i686-pc-windows-gnu`](https://doc.rust-lang.org/libc/i686-pc-windows-gnu/libc/) ++ * [`x86_64-pc-windows-gnu`](https://doc.rust-lang.org/libc/x86_64-pc-windows-gnu/libc/) ++ * [`i686-apple-darwin`](https://doc.rust-lang.org/libc/i686-apple-darwin/libc/) ++ * [`x86_64-apple-darwin`](https://doc.rust-lang.org/libc/x86_64-apple-darwin/libc/) ++ (OSX) ++ * `i386-apple-ios` ++ * `x86_64-apple-ios` ++ * [`i686-unknown-linux-gnu`](https://doc.rust-lang.org/libc/i686-unknown-linux-gnu/libc/) ++ * [`x86_64-unknown-linux-gnu`](https://doc.rust-lang.org/libc/x86_64-unknown-linux-gnu/libc/) ++ (Linux) ++ * 
[`x86_64-unknown-linux-musl`](https://doc.rust-lang.org/libc/x86_64-unknown-linux-musl/libc/) ++ (Linux MUSL) ++ * [`aarch64-unknown-linux-gnu`](https://doc.rust-lang.org/libc/aarch64-unknown-linux-gnu/libc/) ++ * [`mips-unknown-linux-gnu`](https://doc.rust-lang.org/libc/mips-unknown-linux-gnu/libc/) ++ * [`arm-unknown-linux-gnueabihf`](https://doc.rust-lang.org/libc/arm-unknown-linux-gnueabihf/libc/) ++ * [`arm-linux-androideabi`](https://doc.rust-lang.org/libc/arm-linux-androideabi/libc/) ++ (Android) ++ * [`x86_64-unknown-freebsd`](https://doc.rust-lang.org/libc/x86_64-unknown-freebsd/libc/) ++ * [`x86_64-unknown-openbsd`](https://doc.rust-lang.org/libc/x86_64-unknown-openbsd/libc/) ++ * [`x86_64-rumprun-netbsd`](https://doc.rust-lang.org/libc/x86_64-unknown-netbsd/libc/) ++ ++The following may be supported, but are not guaranteed to always work: ++ ++ * `i686-unknown-freebsd` ++ * [`x86_64-unknown-bitrig`](https://doc.rust-lang.org/libc/x86_64-unknown-bitrig/libc/) ++ * [`x86_64-unknown-dragonfly`](https://doc.rust-lang.org/libc/x86_64-unknown-dragonfly/libc/) ++ * `i686-unknown-haiku` ++ * `x86_64-unknown-haiku` ++ * [`x86_64-unknown-netbsd`](https://doc.rust-lang.org/libc/x86_64-unknown-netbsd/libc/) diff --cc vendor/libc-0.2.28/appveyor.yml index 000000000,000000000..22ef8a560 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/appveyor.yml @@@ -1,0 -1,0 +1,31 @@@ ++environment: ++ # When this was added there were revocation check failures when using the ++ # libcurl backend as libcurl checks by default, but rustup doesn't provide the ++ # switch to turn this off. Switch to Hyper which looks to not check for ++ # revocation by default like libcurl does. 
++ RUSTUP_USE_HYPER: 1 ++ CARGO_HTTP_CHECK_REVOKE: false ++ matrix: ++ - TARGET: x86_64-pc-windows-gnu ++ MSYS2_BITS: 64 ++ - TARGET: i686-pc-windows-gnu ++ MSYS2_BITS: 32 ++ - TARGET: x86_64-pc-windows-msvc ++ - TARGET: i686-pc-windows-msvc ++install: ++ - appveyor-retry appveyor DownloadFile https://win.rustup.rs/ -FileName rustup-init.exe ++ - rustup-init.exe -y --default-host %TARGET% ++ - set PATH=%PATH%;C:\Users\appveyor\.cargo\bin ++ - if defined MSYS2_BITS set PATH=%PATH%;C:\msys64\mingw%MSYS2_BITS%\bin ++ - rustc -V ++ - cargo -V ++ ++build: false ++ ++test_script: ++ - cargo test --target %TARGET% ++ - cargo run --manifest-path libc-test/Cargo.toml --target %TARGET% ++ ++cache: ++ - target ++ - C:\Users\appveyor\.cargo\registry diff --cc vendor/libc-0.2.28/ci/README.md index 000000000,000000000..13c7c8da5 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/README.md @@@ -1,0 -1,0 +1,203 @@@ ++The goal of the libc crate is to have CI running everywhere to have the ++strongest guarantees about the definitions that this library contains, and as a ++result the CI is pretty complicated and also pretty large! Hopefully this can ++serve as a guide through the sea of scripts in this directory and elsewhere in ++this project. ++ ++# Files ++ ++First up, let's talk about the files in this directory: ++ ++* `run-travis.sh` - a shell script run by all Travis builders, this is ++ responsible for setting up the rest of the environment such as installing new ++ packages, downloading Rust target libraries, etc. ++ ++* `run.sh` - the actual script which runs tests for a particular architecture. ++ Called from the `run-travis.sh` script this will run all tests for the target ++ specified. ++ ++* `cargo-config` - Cargo configuration of linkers to use copied into place by ++ the `run-travis.sh` script before builds are run. ++ ++* `dox.sh` - script called from `run-travis.sh` on only the linux 64-bit nightly ++ Travis bots to build documentation for this crate. 
++ ++* `landing-page-*.html` - used by `dox.sh` to generate a landing page for all ++ architectures' documentation. ++ ++* `run-qemu.sh` - see discussion about QEMU below ++ ++* `mips`, `rumprun` - instructions to build the docker image for each respective ++ CI target ++ ++# CI Systems ++ ++Currently this repository leverages a combination of Travis CI and AppVeyor for ++running tests. The triples tested are: ++ ++* AppVeyor ++ * `{i686,x86_64}-pc-windows-{msvc,gnu}` ++* Travis ++ * `{i686,x86_64,mips,aarch64}-unknown-linux-gnu` ++ * `x86_64-unknown-linux-musl` ++ * `arm-unknown-linux-gnueabihf` ++ * `arm-linux-androideabi` ++ * `{i686,x86_64}-apple-{darwin,ios}` ++ * `x86_64-rumprun-netbsd` ++ * `x86_64-unknown-freebsd` ++ * `x86_64-unknown-openbsd` ++ ++The Windows triples are all pretty standard, they just set up their environment ++then run tests, no need for downloading any extra target libs (we just download ++the right installer). The Intel Linux/OSX builds are similar in that we just ++download the right target libs and run tests. Note that the Intel Linux/OSX ++builds are run on stable/beta/nightly, but are the only ones that do so. ++ ++The remaining architectures look like: ++ ++* Android runs in a [docker image][android-docker] with an emulator, the NDK, ++ and the SDK already set up. The entire build happens within the docker image. ++* The MIPS, ARM, and AArch64 builds all use the QEMU userspace emulator to run ++ the generated binary to actually verify the tests pass. ++* The MUSL build just has to download a MUSL compiler and target libraries and ++ then otherwise runs tests normally. ++* iOS builds need an extra linker flag currently, but beyond that they're built ++ as standard as everything else. ++* The rumprun target builds an entire kernel from the test suite and then runs ++ it inside QEMU using the serial console to test whether it succeeded or ++ failed. 
++* The BSD builds, currently OpenBSD and FreeBSD, use QEMU to boot up a system ++ and compile/run tests. More information on that below. ++ ++[android-docker]: https://github.com/rust-lang/rust-buildbot/blob/master/slaves/android/Dockerfile ++ ++## QEMU ++ ++Lots of the architectures tested here use QEMU in the tests, so it's worth going ++over all the crazy capabilities QEMU has and the various flavors in which we use ++it! ++ ++First up, QEMU has userspace emulation where it doesn't boot a full kernel, it ++just runs a binary from another architecture (using the `qemu-` wrappers). ++We provide it the runtime path for the dynamically loaded system libraries, ++however. This strategy is used for all Linux architectures that aren't intel. ++Note that one downside of this QEMU system is that threads are barely ++implemented, so we're careful to not spawn many threads. ++ ++For the rumprun target the only output is a kernel image, so we just use that ++plus the `rumpbake` command to create a full kernel image which is then run from ++within QEMU. ++ ++Finally, the fun part, the BSDs. Quite a few hoops are jumped through to get CI ++working for these platforms, but the gist of it looks like: ++ ++* Cross compiling from Linux to any of the BSDs seems to be quite non-standard. ++ We may be able to get it working but it might be difficult at that point to ++ ensure that the libc definitions align with what you'd get on the BSD itself. ++ As a result, we try to do compiles within the BSD distro. ++* On Travis we can't run a VM-in-a-VM, so we resort to userspace emulation ++ (QEMU). ++* Unfortunately on Travis we also can't use KVM, so the emulation is super slow. ++ ++With all that in mind, the way BSD is tested looks like: ++ ++1. Download a pre-prepared image for the OS being tested. ++2. Generate the tests for the OS being tested. 
This involves running the `ctest` ++ library over libc to generate a Rust file and a C file which will then be ++ compiled into the final test. ++3. Generate a disk image which will later be mounted by the OS being tested. ++ This image is mostly just the libc directory, but some modifications are made ++ to compile the generated files from step 2. ++4. The kernel is booted in QEMU, and it is configured to detect the libc-test ++ image being available, run the test script, and then shut down afterwards. ++5. Look for whether the tests passed in the serial console output of the kernel. ++ ++There's some pretty specific instructions for setting up each image (detailed ++below), but the main gist of this is that we must avoid a vanilla `cargo run` ++inside of the `libc-test` directory (which is what it's intended for) because ++that would compile `syntex_syntax`, a large library, with userspace emulation. ++This invariably times out on Travis, so we can't do that. ++ ++Once all those hoops are jumped through, however, we can be happy that we're ++testing almost everything! ++ ++Below are some details of how to set up the initial OS images which are ++downloaded. Each image must be enabled have input/output over the serial ++console, log in automatically at the serial console, detect if a second drive in ++QEMU is available, and if so mount it, run a script (it'll specifically be ++`run-qemu.sh` in this folder which is copied into the generated image talked ++about above), and then shut down. ++ ++### QEMU setup - FreeBSD ++ ++1. Download CD installer (most minimal is fine) ++2. `qemu-img create -f qcow2 foo.qcow2 2G` ++3. `qemu -cdrom foo.iso -drive if=virtio,file=foo.qcow2 -net nic,model=virtio -net user` ++4. run installer ++5. `echo 'console="comconsole"' >> /boot/loader.conf` ++6. `echo 'autoboot_delay="0"' >> /boot/loader.conf` ++7. look at /etc/ttys, see what getty argument is for ttyu0 ++8. 
edit /etc/gettytab, look for ttyu0 argument, prepend `:al=root` to line ++ beneath ++ ++(note that the current image has a `freebsd` user, but this isn't really ++necessary) ++ ++Once that's done, arrange for this script to run at login: ++ ++``` ++#!/bin/sh ++ ++sudo kldload ext2fs ++[ -e /dev/vtbd1 ] || exit 0 ++sudo mount -t ext2fs /dev/vtbd1 /mnt ++sh /mnt/run.sh /mnt ++sudo poweroff ++``` ++ ++Helpful links ++ ++* https://en.wikibooks.org/wiki/QEMU/Images ++* https://blog.nekoconeko.nl/blog/2015/06/04/creating-an-openstack-freebsd-image.html ++* https://www.freebsd.org/doc/handbook/serialconsole-setup.html ++ ++ ++### QEMU setup - OpenBSD ++ ++1. Download CD installer ++2. `qemu-img create -f qcow2 foo.qcow2 2G` ++3. `qemu -cdrom foo.iso -drive if=virtio,file=foo.qcow2 -net nic,model=virtio -net user` ++4. run installer ++5. `echo 'set tty com0' >> /etc/boot.conf` ++6. `echo 'boot' >> /etc/boot.conf` ++7. Modify /etc/ttys, change the `tty00` at the end from 'unknown off' to ++ 'vt220 on secure' ++8. Modify same line in /etc/ttys to have `"/root/foo.sh"` as the shell ++9. Add this script to `/root/foo.sh` ++ ++``` ++#!/bin/sh ++exec 1>/dev/tty00 ++exec 2>&1 ++ ++if mount -t ext2fs /dev/sd1c /mnt; then ++ sh /mnt/run.sh /mnt ++ shutdown -ph now ++fi ++ ++# limited shell... ++exec /bin/sh < /dev/tty00 ++``` ++ ++10. `chmod +x /root/foo.sh` ++ ++Helpful links: ++ ++* https://en.wikibooks.org/wiki/QEMU/Images ++* http://www.openbsd.org/faq/faq7.html#SerCon ++ ++# Questions? ++ ++Hopefully that's at least somewhat of an introduction to everything going on ++here, and feel free to ping @alexcrichton with questions! 
++ diff --cc vendor/libc-0.2.28/ci/android-accept-licenses.sh index 000000000,000000000..8d8f60a5e new file mode 100755 --- /dev/null +++ b/vendor/libc-0.2.28/ci/android-accept-licenses.sh @@@ -1,0 -1,0 +1,15 @@@ ++#!/usr/bin/expect -f ++# ignore-license ++ ++set timeout 1800 ++set cmd [lindex $argv 0] ++set licenses [lindex $argv 1] ++ ++spawn {*}$cmd ++expect { ++ "Do you accept the license '*'*" { ++ exp_send "y\r" ++ exp_continue ++ } ++ eof ++} diff --cc vendor/libc-0.2.28/ci/android-install-ndk.sh index 000000000,000000000..873f6c52c new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/android-install-ndk.sh @@@ -1,0 -1,0 +1,37 @@@ ++#!/bin/sh ++# Copyright 2016 The Rust Project Developers. See the COPYRIGHT ++# file at the top-level directory of this distribution and at ++# http://rust-lang.org/COPYRIGHT. ++# ++# Licensed under the Apache License, Version 2.0 or the MIT license ++# , at your ++# option. This file may not be copied, modified, or distributed ++# except according to those terms. ++ ++set -ex ++ ++curl -O https://dl.google.com/android/repository/android-ndk-r15b-linux-x86_64.zip ++unzip -q android-ndk-r15b-linux-x86_64.zip ++ ++case "$1" in ++ aarch64) ++ arch=arm64 ++ ;; ++ ++ i686) ++ arch=x86 ++ ;; ++ ++ *) ++ arch=$1 ++ ;; ++esac; ++ ++android-ndk-r15b/build/tools/make_standalone_toolchain.py \ ++ --unified-headers \ ++ --install-dir /android/ndk-$1 \ ++ --arch $arch \ ++ --api 24 ++ ++rm -rf ./android-ndk-r15b-linux-x86_64.zip ./android-ndk-r15b diff --cc vendor/libc-0.2.28/ci/android-install-sdk.sh index 000000000,000000000..d03b7623b new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/android-install-sdk.sh @@@ -1,0 -1,0 +1,57 @@@ ++#!/bin/sh ++# Copyright 2016 The Rust Project Developers. See the COPYRIGHT ++# file at the top-level directory of this distribution and at ++# http://rust-lang.org/COPYRIGHT. ++# ++# Licensed under the Apache License, Version 2.0 or the MIT license ++# , at your ++# option. 
This file may not be copied, modified, or distributed ++# except according to those terms. ++ ++set -ex ++ ++# Prep the SDK and emulator ++# ++# Note that the update process requires that we accept a bunch of licenses, and ++# we can't just pipe `yes` into it for some reason, so we take the same strategy ++# located in https://github.com/appunite/docker by just wrapping it in a script ++# which apparently magically accepts the licenses. ++ ++mkdir sdk ++curl https://dl.google.com/android/repository/tools_r25.2.5-linux.zip -O ++unzip -d sdk tools_r25.2.5-linux.zip ++ ++filter="platform-tools,android-24" ++ ++case "$1" in ++ arm | armv7) ++ abi=armeabi-v7a ++ ;; ++ ++ aarch64) ++ abi=arm64-v8a ++ ;; ++ ++ i686) ++ abi=x86 ++ ;; ++ ++ x86_64) ++ abi=x86_64 ++ ;; ++ ++ *) ++ echo "invalid arch: $1" ++ exit 1 ++ ;; ++esac; ++ ++filter="$filter,sys-img-$abi-android-24" ++ ++./android-accept-licenses.sh "android - update sdk -a --no-ui --filter $filter" ++ ++echo "no" | android create avd \ ++ --name $1 \ ++ --target android-24 \ ++ --abi $abi diff --cc vendor/libc-0.2.28/ci/android-sysimage.sh index 000000000,000000000..9611dfeb0 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/android-sysimage.sh @@@ -1,0 -1,0 +1,52 @@@ ++# Copyright 2017 The Rust Project Developers. See the COPYRIGHT ++# file at the top-level directory of this distribution and at ++# http://rust-lang.org/COPYRIGHT. ++# ++# Licensed under the Apache License, Version 2.0 or the MIT license ++# , at your ++# option. This file may not be copied, modified, or distributed ++# except according to those terms. ++ ++set -ex ++ ++URL=https://dl.google.com/android/repository/sys-img/android ++ ++main() { ++ local arch=$1 ++ local name=$2 ++ local dest=/system ++ local td=$(mktemp -d) ++ ++ apt-get install --no-install-recommends e2tools ++ ++ pushd $td ++ curl -O $URL/$name ++ unzip -q $name ++ ++ local system=$(find . 
-name system.img) ++ mkdir -p $dest/{bin,lib,lib64} ++ ++ # Extract android linker and libraries to /system ++ # This allows android executables to be run directly (or with qemu) ++ if [ $arch = "x86_64" -o $arch = "arm64" ]; then ++ e2cp -p $system:/bin/linker64 $dest/bin/ ++ e2cp -p $system:/lib64/libdl.so $dest/lib64/ ++ e2cp -p $system:/lib64/libc.so $dest/lib64/ ++ e2cp -p $system:/lib64/libm.so $dest/lib64/ ++ else ++ e2cp -p $system:/bin/linker $dest/bin/ ++ e2cp -p $system:/lib/libdl.so $dest/lib/ ++ e2cp -p $system:/lib/libc.so $dest/lib/ ++ e2cp -p $system:/lib/libm.so $dest/lib/ ++ fi ++ ++ # clean up ++ apt-get purge --auto-remove -y e2tools ++ ++ popd ++ ++ rm -rf $td ++} ++ ++main "${@}" diff --cc vendor/libc-0.2.28/ci/docker/aarch64-linux-android/Dockerfile index 000000000,000000000..7ad84926b new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/aarch64-linux-android/Dockerfile @@@ -1,0 -1,0 +1,32 @@@ ++FROM ubuntu:16.04 ++ ++RUN dpkg --add-architecture i386 && \ ++ apt-get update && \ ++ apt-get install -y --no-install-recommends \ ++ file \ ++ curl \ ++ ca-certificates \ ++ python \ ++ unzip \ ++ expect \ ++ openjdk-9-jre \ ++ libstdc++6:i386 \ ++ libpulse0 \ ++ gcc \ ++ libc6-dev ++ ++WORKDIR /android/ ++COPY android* /android/ ++ ++ENV ANDROID_ARCH=aarch64 ++ENV PATH=$PATH:/android/ndk-$ANDROID_ARCH/bin:/android/sdk/tools:/android/sdk/platform-tools ++ ++RUN sh /android/android-install-ndk.sh $ANDROID_ARCH ++RUN sh /android/android-install-sdk.sh $ANDROID_ARCH ++RUN mv /root/.android /tmp ++RUN chmod 777 -R /tmp/.android ++RUN chmod 755 /android/sdk/tools/* /android/sdk/tools/qemu/linux-x86_64/* ++ ++ENV PATH=$PATH:/rust/bin \ ++ CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER=aarch64-linux-android-gcc \ ++ HOME=/tmp diff --cc vendor/libc-0.2.28/ci/docker/aarch64-unknown-linux-gnu/Dockerfile index 000000000,000000000..2a02f6321 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/aarch64-unknown-linux-gnu/Dockerfile 
@@@ -1,0 -1,0 +1,7 @@@ ++FROM ubuntu:17.10 ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc libc6-dev ca-certificates \ ++ gcc-aarch64-linux-gnu libc6-dev-arm64-cross qemu-user ++ENV CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc \ ++ PATH=$PATH:/rust/bin diff --cc vendor/libc-0.2.28/ci/docker/arm-linux-androideabi/Dockerfile index 000000000,000000000..054941416 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/arm-linux-androideabi/Dockerfile @@@ -1,0 -1,0 +1,32 @@@ ++FROM ubuntu:16.04 ++ ++RUN dpkg --add-architecture i386 && \ ++ apt-get update && \ ++ apt-get install -y --no-install-recommends \ ++ file \ ++ curl \ ++ ca-certificates \ ++ python \ ++ unzip \ ++ expect \ ++ openjdk-9-jre \ ++ libstdc++6:i386 \ ++ libpulse0 \ ++ gcc \ ++ libc6-dev ++ ++WORKDIR /android/ ++COPY android* /android/ ++ ++ENV ANDROID_ARCH=arm ++ENV PATH=$PATH:/android/ndk-$ANDROID_ARCH/bin:/android/sdk/tools:/android/sdk/platform-tools ++ ++RUN sh /android/android-install-ndk.sh $ANDROID_ARCH ++RUN sh /android/android-install-sdk.sh $ANDROID_ARCH ++RUN mv /root/.android /tmp ++RUN chmod 777 -R /tmp/.android ++RUN chmod 755 /android/sdk/tools/* /android/sdk/tools/qemu/linux-x86_64/* ++ ++ENV PATH=$PATH:/rust/bin \ ++ CARGO_TARGET_ARM_LINUX_ANDROIDEABI_LINKER=arm-linux-androideabi-gcc \ ++ HOME=/tmp diff --cc vendor/libc-0.2.28/ci/docker/arm-unknown-linux-gnueabihf/Dockerfile index 000000000,000000000..53da39825 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/arm-unknown-linux-gnueabihf/Dockerfile @@@ -1,0 -1,0 +1,7 @@@ ++FROM ubuntu:17.10 ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc libc6-dev ca-certificates \ ++ gcc-arm-linux-gnueabihf libc6-dev-armhf-cross qemu-user ++ENV CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER=arm-linux-gnueabihf-gcc \ ++ PATH=$PATH:/rust/bin diff --cc vendor/libc-0.2.28/ci/docker/i686-linux-android/Dockerfile index 
000000000,000000000..bee904379 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/i686-linux-android/Dockerfile @@@ -1,0 -1,0 +1,32 @@@ ++FROM ubuntu:16.04 ++ ++RUN dpkg --add-architecture i386 && \ ++ apt-get update && \ ++ apt-get install -y --no-install-recommends \ ++ file \ ++ curl \ ++ ca-certificates \ ++ python \ ++ unzip \ ++ expect \ ++ openjdk-9-jre \ ++ libstdc++6:i386 \ ++ libpulse0 \ ++ gcc \ ++ libc6-dev ++ ++WORKDIR /android/ ++COPY android* /android/ ++ ++ENV ANDROID_ARCH=i686 ++ENV PATH=$PATH:/android/ndk-$ANDROID_ARCH/bin:/android/sdk/tools:/android/sdk/platform-tools ++ ++RUN sh /android/android-install-ndk.sh $ANDROID_ARCH ++RUN sh /android/android-install-sdk.sh $ANDROID_ARCH ++RUN mv /root/.android /tmp ++RUN chmod 777 -R /tmp/.android ++RUN chmod 755 /android/sdk/tools/* /android/sdk/tools/qemu/linux-x86_64/* ++ ++ENV PATH=$PATH:/rust/bin \ ++ CARGO_TARGET_I686_LINUX_ANDROID_LINKER=i686-linux-android-gcc \ ++ HOME=/tmp diff --cc vendor/libc-0.2.28/ci/docker/i686-unknown-linux-gnu/Dockerfile index 000000000,000000000..a5a4b8e36 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/i686-unknown-linux-gnu/Dockerfile @@@ -1,0 -1,0 +1,5 @@@ ++FROM ubuntu:17.10 ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc-multilib libc6-dev ca-certificates ++ENV PATH=$PATH:/rust/bin diff --cc vendor/libc-0.2.28/ci/docker/i686-unknown-linux-musl/Dockerfile index 000000000,000000000..3adb92004 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/i686-unknown-linux-musl/Dockerfile @@@ -1,0 -1,0 +1,30 @@@ ++FROM ubuntu:17.10 ++ ++RUN dpkg --add-architecture i386 ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc-multilib make libc6-dev git curl ca-certificates libc6:i386 ++# Below we're cross-compiling musl for i686 using the system compiler on an ++# x86_64 system. 
This is an awkward thing to be doing and so we have to jump ++# through a couple hoops to get musl to be happy. In particular: ++# ++# * We specifically pass -m32 in CFLAGS and override CC when running ./configure, ++# since otherwise the script will fail to find a compiler. ++# * We manually unset CROSS_COMPILE when running make; otherwise the makefile ++# will call the non-existent binary 'i686-ar'. ++RUN curl https://www.musl-libc.org/releases/musl-1.1.15.tar.gz | \ ++ tar xzf - && \ ++ cd musl-1.1.15 && \ ++ CC=gcc CFLAGS=-m32 ./configure --prefix=/musl-i686 --disable-shared --target=i686 && \ ++ make CROSS_COMPILE= install -j4 && \ ++ cd .. && \ ++ rm -rf musl-1.1.15 && \ ++# Install linux kernel headers sanitized for use with musl ++ curl -L https://github.com/sabotage-linux/kernel-headers/archive/v3.12.6-5.tar.gz | \ ++ tar xzf - && \ ++ cd kernel-headers-3.12.6-5 && \ ++ make ARCH=i386 prefix=/musl-i686 install -j4 && \ ++ cd .. && \ ++ rm -rf kernel-headers-3.12.6-5 ++ENV PATH=$PATH:/musl-i686/bin:/rust/bin \ ++ CC_i686_unknown_linux_musl=musl-gcc diff --cc vendor/libc-0.2.28/ci/docker/mips-unknown-linux-gnu/Dockerfile index 000000000,000000000..f4997a702 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/mips-unknown-linux-gnu/Dockerfile @@@ -1,0 -1,0 +1,10 @@@ ++FROM ubuntu:17.10 ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc libc6-dev qemu-user ca-certificates \ ++ gcc-mips-linux-gnu libc6-dev-mips-cross \ ++ qemu-system-mips ++ ++ENV CARGO_TARGET_MIPS_UNKNOWN_LINUX_GNU_LINKER=mips-linux-gnu-gcc \ ++ PATH=$PATH:/rust/bin diff --cc vendor/libc-0.2.28/ci/docker/mips-unknown-linux-musl/Dockerfile index 000000000,000000000..ba8e34642 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/mips-unknown-linux-musl/Dockerfile @@@ -1,0 -1,0 +1,17 @@@ ++FROM ubuntu:17.10 ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc libc6-dev qemu-user ca-certificates 
qemu-system-mips curl \ ++ bzip2 ++ ++RUN mkdir /toolchain ++ ++# Note that this originally came from: ++# https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/OpenWrt-SDK-ar71xx-generic_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 ++RUN curl -L https://s3.amazonaws.com/rust-lang-ci/libc/OpenWrt-SDK-ar71xx-generic_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 | \ ++ tar xjf - -C /toolchain --strip-components=1 ++ ++ENV PATH=$PATH:/rust/bin:/toolchain/staging_dir/toolchain-mips_34kc_gcc-5.3.0_musl-1.1.15/bin \ ++ CC_mips_unknown_linux_musl=mips-openwrt-linux-gcc \ ++ CARGO_TARGET_MIPS_UNKNOWN_LINUX_MUSL_LINKER=mips-openwrt-linux-gcc diff --cc vendor/libc-0.2.28/ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile index 000000000,000000000..a864a31cc new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/mips64-unknown-linux-gnuabi64/Dockerfile @@@ -1,0 -1,0 +1,11 @@@ ++FROM ubuntu:17.10 ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc libc6-dev qemu-user ca-certificates \ ++ gcc-mips64-linux-gnuabi64 libc6-dev-mips64-cross \ ++ qemu-system-mips64 ++ ++ENV CARGO_TARGET_MIPS64_UNKNOWN_LINUX_GNUABI64_LINKER=mips64-linux-gnuabi64-gcc \ ++ CC_mips64_unknown_linux_gnuabi64=mips64-linux-gnuabi64-gcc \ ++ PATH=$PATH:/rust/bin diff --cc vendor/libc-0.2.28/ci/docker/mipsel-unknown-linux-musl/Dockerfile index 000000000,000000000..36666743f new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/mipsel-unknown-linux-musl/Dockerfile @@@ -1,0 -1,0 +1,17 @@@ ++FROM ubuntu:17.10 ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc libc6-dev qemu-user ca-certificates qemu-system-mips curl \ ++ bzip2 ++ ++RUN mkdir /toolchain ++ ++# Note that this originally came from: ++# https://downloads.openwrt.org/snapshots/trunk/malta/generic/OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 ++RUN curl -L 
https://s3.amazonaws.com/rust-lang-ci/libc/OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 | \ ++ tar xjf - -C /toolchain --strip-components=2 ++ ++ENV PATH=$PATH:/rust/bin:/toolchain/bin \ ++ CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \ ++ CARGO_TARGET_MIPSEL_UNKNOWN_LINUX_MUSL_LINKER=mipsel-openwrt-linux-gcc diff --cc vendor/libc-0.2.28/ci/docker/powerpc-unknown-linux-gnu/Dockerfile index 000000000,000000000..489f8dd57 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/powerpc-unknown-linux-gnu/Dockerfile @@@ -1,0 -1,0 +1,10 @@@ ++FROM ubuntu:17.10 ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc libc6-dev qemu-user ca-certificates \ ++ gcc-powerpc-linux-gnu libc6-dev-powerpc-cross \ ++ qemu-system-ppc ++ ++ENV CARGO_TARGET_POWERPC_UNKNOWN_LINUX_GNU_LINKER=powerpc-linux-gnu-gcc \ ++ PATH=$PATH:/rust/bin diff --cc vendor/libc-0.2.28/ci/docker/powerpc64-unknown-linux-gnu/Dockerfile index 000000000,000000000..51ebcca7e new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/powerpc64-unknown-linux-gnu/Dockerfile @@@ -1,0 -1,0 +1,11 @@@ ++FROM ubuntu:17.10 ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc libc6-dev qemu-user ca-certificates \ ++ gcc-powerpc64-linux-gnu libc6-dev-ppc64-cross \ ++ qemu-system-ppc ++ ++ENV CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_LINKER=powerpc64-linux-gnu-gcc \ ++ CC=powerpc64-linux-gnu-gcc \ ++ PATH=$PATH:/rust/bin diff --cc vendor/libc-0.2.28/ci/docker/s390x-unknown-linux-gnu/Dockerfile index 000000000,000000000..4cd9d4ae4 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/s390x-unknown-linux-gnu/Dockerfile @@@ -1,0 -1,0 +1,9 @@@ ++FROM ubuntu:17.10 ++ ++RUN apt-get update && apt-get install -y --no-install-recommends \ ++ gcc libc6-dev qemu-user ca-certificates \ ++ gcc-s390x-linux-gnu libc6-dev-s390x-cross ++ ++ENV CARGO_TARGET_S390X_UNKNOWN_LINUX_GNU_LINKER=s390x-linux-gnu-gcc \ ++ 
CC_s390x_unknown_linux_gnu=s390x-linux-gnu-gcc \ ++ PATH=$PATH:/rust/bin diff --cc vendor/libc-0.2.28/ci/docker/x86_64-linux-android/Dockerfile index 000000000,000000000..0cfbc4820 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/x86_64-linux-android/Dockerfile @@@ -1,0 -1,0 +1,26 @@@ ++FROM ubuntu:16.04 ++ ++RUN apt-get update && \ ++ apt-get install -y --no-install-recommends \ ++ ca-certificates \ ++ curl \ ++ gcc \ ++ libc-dev \ ++ python \ ++ unzip ++ ++WORKDIR /android/ ++ENV ANDROID_ARCH=x86_64 ++COPY android-install-ndk.sh /android/ ++RUN sh /android/android-install-ndk.sh $ANDROID_ARCH ++ ++# We do not run x86_64-linux-android tests on an android emulator. ++# See ci/android-sysimage.sh for informations about how tests are run. ++COPY android-sysimage.sh /android/ ++RUN bash /android/android-sysimage.sh x86_64 x86_64-24_r07.zip ++ ++ENV PATH=$PATH:/rust/bin:/android/ndk-$ANDROID_ARCH/bin \ ++ CARGO_TARGET_X86_64_LINUX_ANDROID_LINKER=x86_64-linux-android-gcc \ ++ CC_x86_64_linux_android=x86_64-linux-android-gcc \ ++ CXX_x86_64_linux_android=x86_64-linux-android-g++ \ ++ HOME=/tmp diff --cc vendor/libc-0.2.28/ci/docker/x86_64-rumprun-netbsd/Dockerfile index 000000000,000000000..129771e76 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/x86_64-rumprun-netbsd/Dockerfile @@@ -1,0 -1,0 +1,6 @@@ ++FROM mato/rumprun-toolchain-hw-x86_64 ++USER root ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ qemu ++ENV PATH=$PATH:/rust/bin diff --cc vendor/libc-0.2.28/ci/docker/x86_64-unknown-freebsd/Dockerfile index 000000000,000000000..12b0bdffc new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/x86_64-unknown-freebsd/Dockerfile @@@ -1,0 -1,0 +1,13 @@@ ++FROM alexcrichton/rust-slave-linux-cross:2016-04-15 ++USER root ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ qemu genext2fs ++ ++ENTRYPOINT ["sh"] ++ ++ENV PATH=$PATH:/rust/bin \ ++ 
QEMU=2016-11-06/freebsd.qcow2.gz \ ++ CAN_CROSS=1 \ ++ CARGO_TARGET_X86_64_UNKNOWN_FREEBSD_LINKER=x86_64-unknown-freebsd10-gcc diff --cc vendor/libc-0.2.28/ci/docker/x86_64-unknown-linux-gnu/Dockerfile index 000000000,000000000..ca60edece new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/x86_64-unknown-linux-gnu/Dockerfile @@@ -1,0 -1,0 +1,5 @@@ ++FROM ubuntu:17.10 ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc libc6-dev ca-certificates ++ENV PATH=$PATH:/rust/bin diff --cc vendor/libc-0.2.28/ci/docker/x86_64-unknown-linux-musl/Dockerfile index 000000000,000000000..d9d651138 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/x86_64-unknown-linux-musl/Dockerfile @@@ -1,0 -1,0 +1,20 @@@ ++FROM ubuntu:17.10 ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc make libc6-dev git curl ca-certificates ++RUN curl https://www.musl-libc.org/releases/musl-1.1.15.tar.gz | \ ++ tar xzf - && \ ++ cd musl-1.1.15 && \ ++ ./configure --prefix=/musl-x86_64 && \ ++ make install -j4 && \ ++ cd .. && \ ++ rm -rf musl-1.1.15 && \ ++# Install linux kernel headers sanitized for use with musl ++ curl -L https://github.com/sabotage-linux/kernel-headers/archive/v3.12.6-5.tar.gz | \ ++ tar xzf - && \ ++ cd kernel-headers-3.12.6-5 && \ ++ make ARCH=x86_64 prefix=/musl-x86_64 install -j4 && \ ++ cd .. 
&& \ ++ rm -rf kernel-headers-3.12.6-5 ++ENV PATH=$PATH:/musl-x86_64/bin:/rust/bin diff --cc vendor/libc-0.2.28/ci/docker/x86_64-unknown-openbsd/Dockerfile index 000000000,000000000..518baf870 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/docker/x86_64-unknown-openbsd/Dockerfile @@@ -1,0 -1,0 +1,8 @@@ ++FROM ubuntu:16.10 ++ ++RUN apt-get update ++RUN apt-get install -y --no-install-recommends \ ++ gcc libc6-dev qemu curl ca-certificates \ ++ genext2fs ++ENV PATH=$PATH:/rust/bin \ ++ QEMU=2016-11-06/openbsd-6.0-without-pkgs.qcow2 diff --cc vendor/libc-0.2.28/ci/dox.sh index 000000000,000000000..85e924394 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/dox.sh @@@ -1,0 -1,0 +1,33 @@@ ++#!/bin/sh ++ ++# Builds documentation for all target triples that we have a registered URL for ++# in liblibc. This scrapes the list of triples to document from `src/lib.rs` ++# which has a bunch of `html_root_url` directives we pick up. ++ ++set -e ++ ++TARGETS=`grep html_root_url src/lib.rs | sed 's/.*".*\/\(.*\)"/\1/'` ++ ++rm -rf target/doc ++mkdir -p target/doc ++ ++cp ci/landing-page-head.html target/doc/index.html ++ ++for target in $TARGETS; do ++ echo documenting $target ++ ++ rustdoc -o target/doc/$target --target $target src/lib.rs --cfg dox \ ++ --crate-name libc ++ ++ echo "" \ ++ >> target/doc/index.html ++done ++ ++cat ci/landing-page-footer.html >> target/doc/index.html ++ ++# If we're on travis, not a PR, and on the right branch, publish! 
++if [ "$TRAVIS_PULL_REQUEST" = "false" ] && [ "$TRAVIS_BRANCH" = "master" ]; then ++ pip install ghp_import --install-option="--prefix=$HOME/.local" ++ $HOME/.local/bin/ghp-import -n target/doc ++ git push -qf https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages ++fi diff --cc vendor/libc-0.2.28/ci/ios/deploy_and_run_on_ios_simulator.rs index 000000000,000000000..b14615036 new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/ios/deploy_and_run_on_ios_simulator.rs @@@ -1,0 -1,0 +1,171 @@@ ++// Copyright 2017 The Rust Project Developers. See the COPYRIGHT ++// file at the top-level directory of this distribution and at ++// http://rust-lang.org/COPYRIGHT. ++// ++// Licensed under the Apache License, Version 2.0 or the MIT license ++// , at your ++// option. This file may not be copied, modified, or distributed ++// except according to those terms. ++ ++// This is a script to deploy and execute a binary on an iOS simulator. ++// The primary use of this is to be able to run unit tests on the simulator and ++// retrieve the results. ++// ++// To do this through Cargo instead, use Dinghy ++// (https://github.com/snipsco/dinghy): cargo dinghy install, then cargo dinghy ++// test. ++ ++use std::env; ++use std::fs::{self, File}; ++use std::io::Write; ++use std::path::Path; ++use std::process; ++use std::process::Command; ++ ++macro_rules! 
t { ++ ($e:expr) => (match $e { ++ Ok(e) => e, ++ Err(e) => panic!("{} failed with: {}", stringify!($e), e), ++ }) ++} ++ ++// Step one: Wrap as an app ++fn package_as_simulator_app(crate_name: &str, test_binary_path: &Path) { ++ println!("Packaging simulator app"); ++ drop(fs::remove_dir_all("ios_simulator_app")); ++ t!(fs::create_dir("ios_simulator_app")); ++ t!(fs::copy(test_binary_path, ++ Path::new("ios_simulator_app").join(crate_name))); ++ ++ let mut f = t!(File::create("ios_simulator_app/Info.plist")); ++ t!(f.write_all(format!(r#" ++ ++ ++ ++ ++ CFBundleExecutable ++ {} ++ CFBundleIdentifier ++ com.rust.unittests ++ ++ ++ "#, crate_name).as_bytes())); ++} ++ ++// Step two: Start the iOS simulator ++fn start_simulator() { ++ println!("Looking for iOS simulator"); ++ let output = t!(Command::new("xcrun").arg("simctl").arg("list").output()); ++ assert!(output.status.success()); ++ let mut simulator_exists = false; ++ let mut simulator_booted = false; ++ let mut found_rust_sim = false; ++ let stdout = t!(String::from_utf8(output.stdout)); ++ for line in stdout.lines() { ++ if line.contains("rust_ios") { ++ if found_rust_sim { ++ panic!("Duplicate rust_ios simulators found. 
Please \ ++ double-check xcrun simctl list."); ++ } ++ simulator_exists = true; ++ simulator_booted = line.contains("(Booted)"); ++ found_rust_sim = true; ++ } ++ } ++ ++ if simulator_exists == false { ++ println!("Creating iOS simulator"); ++ Command::new("xcrun") ++ .arg("simctl") ++ .arg("create") ++ .arg("rust_ios") ++ .arg("com.apple.CoreSimulator.SimDeviceType.iPhone-SE") ++ .arg("com.apple.CoreSimulator.SimRuntime.iOS-10-2") ++ .check_status(); ++ } else if simulator_booted == true { ++ println!("Shutting down already-booted simulator"); ++ Command::new("xcrun") ++ .arg("simctl") ++ .arg("shutdown") ++ .arg("rust_ios") ++ .check_status(); ++ } ++ ++ println!("Starting iOS simulator"); ++ // We can't uninstall the app (if present) as that will hang if the ++ // simulator isn't completely booted; just erase the simulator instead. ++ Command::new("xcrun").arg("simctl").arg("erase").arg("rust_ios").check_status(); ++ Command::new("xcrun").arg("simctl").arg("boot").arg("rust_ios").check_status(); ++} ++ ++// Step three: Install the app ++fn install_app_to_simulator() { ++ println!("Installing app to simulator"); ++ Command::new("xcrun") ++ .arg("simctl") ++ .arg("install") ++ .arg("booted") ++ .arg("ios_simulator_app/") ++ .check_status(); ++} ++ ++// Step four: Run the app ++fn run_app_on_simulator() { ++ println!("Running app"); ++ let output = t!(Command::new("xcrun") ++ .arg("simctl") ++ .arg("launch") ++ .arg("--console") ++ .arg("booted") ++ .arg("com.rust.unittests") ++ .output()); ++ ++ println!("stdout --\n{}\n", String::from_utf8_lossy(&output.stdout)); ++ println!("stderr --\n{}\n", String::from_utf8_lossy(&output.stderr)); ++ ++ let stdout = String::from_utf8_lossy(&output.stdout); ++ let passed = stdout.lines() ++ .find(|l| l.contains("PASSED")) ++ .map(|l| l.contains("tests")) ++ .unwrap_or(false); ++ ++ println!("Shutting down simulator"); ++ Command::new("xcrun") ++ .arg("simctl") ++ .arg("shutdown") ++ .arg("rust_ios") ++ .check_status(); ++ if 
!passed { ++ panic!("tests didn't pass"); ++ } ++} ++ ++trait CheckStatus { ++ fn check_status(&mut self); ++} ++ ++impl CheckStatus for Command { ++ fn check_status(&mut self) { ++ println!("\trunning: {:?}", self); ++ assert!(t!(self.status()).success()); ++ } ++} ++ ++fn main() { ++ let args: Vec = env::args().collect(); ++ if args.len() != 2 { ++ println!("Usage: {} ", args[0]); ++ process::exit(-1); ++ } ++ ++ let test_binary_path = Path::new(&args[1]); ++ let crate_name = test_binary_path.file_name().unwrap(); ++ ++ package_as_simulator_app(crate_name.to_str().unwrap(), test_binary_path); ++ start_simulator(); ++ install_app_to_simulator(); ++ run_app_on_simulator(); ++} diff --cc vendor/libc-0.2.28/ci/landing-page-footer.html index 000000000,000000000..941cc8d2b new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/landing-page-footer.html @@@ -1,0 -1,0 +1,3 @@@ ++ ++ ++ diff --cc vendor/libc-0.2.28/ci/landing-page-head.html index 000000000,000000000..fc69fa88e new file mode 100644 --- /dev/null +++ b/vendor/libc-0.2.28/ci/landing-page-head.html @@@ -1,0 -1,0 +1,7 @@@ ++ ++ ++ ++ ++ ++ ++
  • $target