|
| 1 | +commit c4b5abbe43d7c22215ef36ef4f7c1413c975678c |
| 2 | +Author: Martin Kroeker <[email protected]> |
| 3 | +Date: Fri Jan 29 10:45:36 2021 +0100 |
| 4 | + |
| 5 | + fix data type |
| 6 | + |
| 7 | +commit f87842483eee9d158f44d51d4c09662c3cff7526 |
| 8 | +Author: Martin Kroeker <[email protected]> |
| 9 | +Date: Fri Jan 29 09:56:12 2021 +0100 |
| 10 | + |
| 11 | + fix calculation of non-exceptional shift (from Reference-LAPACK PR 477) |
| 12 | + |
| 13 | +commit 856bc365338f7559639f341d76ca8746d1628ee5 |
| 14 | +Author: Martin Kroeker <[email protected]> |
| 15 | +Date: Wed Jan 27 13:41:45 2021 +0100 |
| 16 | + |
| 17 | + Add exceptional shift to fix rare convergence problems |
| 18 | + |
| 19 | +--- |
| 20 | +diff --git a/lapack-netlib/SRC/chgeqz.f b/lapack-netlib/SRC/chgeqz.f |
| 21 | +index 73d35621..4725e716 100644 |
| 22 | +--- a/lapack-netlib/SRC/chgeqz.f |
| 23 | ++++ b/lapack-netlib/SRC/chgeqz.f |
| 24 | +@@ -320,12 +320,13 @@ |
| 25 | + $ C, SAFMIN, TEMP, TEMP2, TEMPR, ULP |
| 26 | + COMPLEX ABI22, AD11, AD12, AD21, AD22, CTEMP, CTEMP2, |
| 27 | + $ CTEMP3, ESHIFT, RTDISC, S, SHIFT, SIGNBC, T1, |
| 28 | +- $ U12, X |
| 29 | ++ $ U12, X, ABI12, Y |
| 30 | + * .. |
| 31 | + * .. External Functions .. |
| 32 | ++ COMPLEX CLADIV |
| 33 | + LOGICAL LSAME |
| 34 | + REAL CLANHS, SLAMCH |
| 35 | +- EXTERNAL LSAME, CLANHS, SLAMCH |
| 36 | ++ EXTERNAL CLADIV, LSAME, CLANHS, SLAMCH |
| 37 | + * .. |
| 38 | + * .. External Subroutines .. |
| 39 | + EXTERNAL CLARTG, CLASET, CROT, CSCAL, XERBLA |
| 40 | +@@ -729,22 +730,34 @@ |
| 41 | + AD22 = ( ASCALE*H( ILAST, ILAST ) ) / |
| 42 | + $ ( BSCALE*T( ILAST, ILAST ) ) |
| 43 | + ABI22 = AD22 - U12*AD21 |
| 44 | ++ ABI12 = AD12 - U12*AD11 |
| 45 | + * |
| 46 | +- T1 = HALF*( AD11+ABI22 ) |
| 47 | +- RTDISC = SQRT( T1**2+AD12*AD21-AD11*AD22 ) |
| 48 | +- TEMP = REAL( T1-ABI22 )*REAL( RTDISC ) + |
| 49 | +- $ AIMAG( T1-ABI22 )*AIMAG( RTDISC ) |
| 50 | +- IF( TEMP.LE.ZERO ) THEN |
| 51 | +- SHIFT = T1 + RTDISC |
| 52 | +- ELSE |
| 53 | +- SHIFT = T1 - RTDISC |
| 54 | ++ SHIFT = ABI22 |
| 55 | ++ CTEMP = SQRT( ABI12 )*SQRT( AD21 ) |
| 56 | ++ TEMP = ABS1( CTEMP ) |
| 57 | ++ IF( CTEMP.NE.ZERO ) THEN |
| 58 | ++ X = HALF*( AD11-SHIFT ) |
| 59 | ++ TEMP2 = ABS1( X ) |
| 60 | ++ TEMP = MAX( TEMP, ABS1( X ) ) |
| 61 | ++ Y = TEMP*SQRT( ( X / TEMP )**2+( CTEMP / TEMP )**2 ) |
| 62 | ++ IF( TEMP2.GT.ZERO ) THEN |
| 63 | ++ IF( REAL( X / TEMP2 )*REAL( Y )+ |
| 64 | ++ $ AIMAG( X / TEMP2 )*AIMAG( Y ).LT.ZERO )Y = -Y |
| 65 | ++ END IF |
| 66 | ++ SHIFT = SHIFT - CTEMP*CLADIV( CTEMP, ( X+Y ) ) |
| 67 | + END IF |
| 68 | + ELSE |
| 69 | + * |
| 70 | + * Exceptional shift. Chosen for no particularly good reason. |
| 71 | + * |
| 72 | +- ESHIFT = ESHIFT + (ASCALE*H(ILAST,ILAST-1))/ |
| 73 | +- $ (BSCALE*T(ILAST-1,ILAST-1)) |
| 74 | ++ IF( ( IITER / 20 )*20.EQ.IITER .AND. |
| 75 | ++ $ BSCALE*ABS1(T( ILAST, ILAST )).GT.SAFMIN ) THEN |
| 76 | ++ ESHIFT = ESHIFT + ( ASCALE*H( ILAST, |
| 77 | ++ $ ILAST ) )/( BSCALE*T( ILAST, ILAST ) ) |
| 78 | ++ ELSE |
| 79 | ++ ESHIFT = ESHIFT + ( ASCALE*H( ILAST, |
| 80 | ++ $ ILAST-1 ) )/( BSCALE*T( ILAST-1, ILAST-1 ) ) |
| 81 | ++ END IF |
| 82 | + SHIFT = ESHIFT |
| 83 | + END IF |
| 84 | + * |
| 85 | +diff --git a/lapack-netlib/SRC/zhgeqz.f b/lapack-netlib/SRC/zhgeqz.f |
| 86 | +index b51cba4f..b28ae47a 100644 |
| 87 | +--- a/lapack-netlib/SRC/zhgeqz.f |
| 88 | ++++ b/lapack-netlib/SRC/zhgeqz.f |
| 89 | +@@ -320,12 +320,13 @@ |
| 90 | + $ C, SAFMIN, TEMP, TEMP2, TEMPR, ULP |
| 91 | + COMPLEX*16 ABI22, AD11, AD12, AD21, AD22, CTEMP, CTEMP2, |
| 92 | + $ CTEMP3, ESHIFT, RTDISC, S, SHIFT, SIGNBC, T1, |
| 93 | +- $ U12, X |
| 94 | ++ $ U12, X, ABI12, Y |
| 95 | + * .. |
| 96 | + * .. External Functions .. |
| 97 | ++ COMPLEX*16 ZLADIV |
| 98 | + LOGICAL LSAME |
| 99 | + DOUBLE PRECISION DLAMCH, ZLANHS |
| 100 | +- EXTERNAL LSAME, DLAMCH, ZLANHS |
| 101 | ++ EXTERNAL ZLADIV, LSAME, DLAMCH, ZLANHS |
| 102 | + * .. |
| 103 | + * .. External Subroutines .. |
| 104 | + EXTERNAL XERBLA, ZLARTG, ZLASET, ZROT, ZSCAL |
| 105 | +@@ -730,22 +731,34 @@ |
| 106 | + AD22 = ( ASCALE*H( ILAST, ILAST ) ) / |
| 107 | + $ ( BSCALE*T( ILAST, ILAST ) ) |
| 108 | + ABI22 = AD22 - U12*AD21 |
| 109 | ++ ABI12 = AD12 - U12*AD11 |
| 110 | + * |
| 111 | +- T1 = HALF*( AD11+ABI22 ) |
| 112 | +- RTDISC = SQRT( T1**2+AD12*AD21-AD11*AD22 ) |
| 113 | +- TEMP = DBLE( T1-ABI22 )*DBLE( RTDISC ) + |
| 114 | +- $ DIMAG( T1-ABI22 )*DIMAG( RTDISC ) |
| 115 | +- IF( TEMP.LE.ZERO ) THEN |
| 116 | +- SHIFT = T1 + RTDISC |
| 117 | +- ELSE |
| 118 | +- SHIFT = T1 - RTDISC |
| 119 | ++ SHIFT = ABI22 |
| 120 | ++ CTEMP = SQRT( ABI12 )*SQRT( AD21 ) |
| 121 | ++ TEMP = ABS1( CTEMP ) |
| 122 | ++ IF( CTEMP.NE.ZERO ) THEN |
| 123 | ++ X = HALF*( AD11-SHIFT ) |
| 124 | ++ TEMP2 = ABS1( X ) |
| 125 | ++ TEMP = MAX( TEMP, ABS1( X ) ) |
| 126 | ++ Y = TEMP*SQRT( ( X / TEMP )**2+( CTEMP / TEMP )**2 ) |
| 127 | ++ IF( TEMP2.GT.ZERO ) THEN |
| 128 | ++ IF( DBLE( X / TEMP2 )*DBLE( Y )+ |
| 129 | ++ $ DIMAG( X / TEMP2 )*DIMAG( Y ).LT.ZERO )Y = -Y |
| 130 | ++ END IF |
| 131 | ++ SHIFT = SHIFT - CTEMP*ZLADIV( CTEMP, ( X+Y ) ) |
| 132 | + END IF |
| 133 | + ELSE |
| 134 | + * |
| 135 | + * Exceptional shift. Chosen for no particularly good reason. |
| 136 | + * |
| 137 | +- ESHIFT = ESHIFT + (ASCALE*H(ILAST,ILAST-1))/ |
| 138 | +- $ (BSCALE*T(ILAST-1,ILAST-1)) |
| 139 | ++ IF( ( IITER / 20 )*20.EQ.IITER .AND. |
| 140 | ++ $ BSCALE*ABS1(T( ILAST, ILAST )).GT.SAFMIN ) THEN |
| 141 | ++ ESHIFT = ESHIFT + ( ASCALE*H( ILAST, |
| 142 | ++ $ ILAST ) )/( BSCALE*T( ILAST, ILAST ) ) |
| 143 | ++ ELSE |
| 144 | ++ ESHIFT = ESHIFT + ( ASCALE*H( ILAST, |
| 145 | ++ $ ILAST-1 ) )/( BSCALE*T( ILAST-1, ILAST-1 ) ) |
| 146 | ++ END IF |
| 147 | + SHIFT = ESHIFT |
| 148 | + END IF |
| 149 | + * |
0 commit comments